+ ./ya make . -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.EiLqPnONRG --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring dependencies for platform default-linux-x86_64-release-asan
Configuring dependencies for platform tools
[2 ymakes processing] [8061/8061 modules configured] [2810/4464 modules rendered]
[2 ymakes processing] [8061/8061 modules configured] [4372/4464 modules rendered]
[2 ymakes processing] [8061/8061 modules configured] [4464/4464 modules rendered]
Configuring dependencies for platform test_tool_tc1-global
[0 ymakes processing] [8067/8067 modules configured] [4464/4464 modules rendered]
Configuring tests execution
Configuring local and dist store caches
Configuration done. Preparing for execution
|33.3%| CLEANING SYMRES
| 0.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc
| 1.4%| PREPARE $(VCS)
| 2.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a
| 4.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace
| 5.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a
| 5.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.a
| 5.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.context/libpy3contrib-python-jaraco.context.global.a
| 5.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.global.a
| 5.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.global.a
| 5.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libbz2/libcontrib-libs-libbz2.a
| 6.0%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a
| 6.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/debugging/libabseil-cpp-absl-debugging.a
| 6.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a
| 8.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.a
| 9.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lcs/liblibrary-cpp-lcs.a
| 9.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang18-rt/lib/asan/libclang_rt.asan-x86_64.a
|10.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/yson/libcpp-json-yson.a
|10.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a
|10.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a
|10.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a
|10.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a
|10.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/easy_parse/libcpp-json-easy_parse.a
| 9.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.a
[... several hundred successful [AR]/[CC] {BAZEL_DOWNLOAD} progress lines elided; PREPARE steps and FAILED targets retained ...]
|11.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a
|12.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a
|17.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a
|21.4%| PREPARE $(YMAKE_PYTHON3-4256832079)
|27.6%| PREPARE $(LLD_ROOT-3808007503)
|28.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a
|33.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/docs/generator/generator
|37.6%| PREPARE $(PYTHON)
|38.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane
|38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a
[... several hundred further [AR]/[CC]/[LD] {BAZEL_DOWNLOAD} progress lines elided; FAILED targets retained ...]
|38.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption
|36.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename
|37.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a
|37.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging
|38.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering
|39.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive
|40.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/ydb-tests-sql
|42.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large
|42.7%| [CC] {BAZEL_DOWNLOAD}
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/MemoryTracker.cpp |42.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnFixedString.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentThread.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Epoll.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ProcfsMetricsProvider.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorDump.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnMap.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/createHardLink.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ZooKeeper/IKeeper.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Throttler.cpp |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/PipeFDs.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/checkStackSize.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/TaskStatsInfoGetter.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadPool.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadProfileEvents.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/TimerDescriptor.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/formatReadable.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/formatIPv6.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/escapeForFileName.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ErrorCodes.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnVector.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/registerDataTypeDateTime.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/FormatFactory.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationTupleElement.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationTuple.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationString.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationWrapper.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/getLeastSupertype.cpp |43.4%| [CP] {default-linux-x86_64, release, asan} $(B)/common_test.context |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/registerFormats.cpp 
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationUUID.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFileWithCache.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/ProtobufWriter.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/OpenedFile.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/ProtobufReader.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNumber.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/NativeFormat.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMappedFileDescriptor.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/QueryLog.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMappedFile.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteHelpers.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/InternalTextLogsQueue.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/copyData.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferValidUTF8.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileDescriptor.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFile.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/UseSSL.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/TimeoutSetter.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileBase.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromPocoSocket.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTAsterisk.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/castColumn.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/createReadBufferFromFileBase.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/readFloatText.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/parseDateTimeBestEffort.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTBackupQuery.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTAlterQuery.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDropQuery.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTOptimizeQuery.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTProjectionSelectQuery.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTOrderByElement.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithOnCluster.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTNameTypePair.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTProjectionDeclaration.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTRolesOrUsersSet.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTLiteral.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSubquery.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryParameter.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSetQuery.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTCreateQuery.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTPartition.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTTTLElement.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/IAST.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWithElement.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWithAlias.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ExpressionListParsers.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/IParserBase.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithOutput.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDatabaseOrNone.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserAlterQuery.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDataType.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ExpressionElementParsers.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserPartition.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserProjectionSelectQuery.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserRenameQuery.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserKillQueryQuery.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserInsertQuery.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserOptimizeQuery.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSampleRatio.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserExternalDDLQuery.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserRolesOrUsersSet.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserQuery.cpp |45.8%| PREPARE 
$(CLANG_FORMAT-1286082657) |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserTablePropertiesQuery.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowTablesQuery.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IOutputFormat.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowPrivilegesQuery.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSystemQuery.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSettingsProfileElement.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/formatAST.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserWatchQuery.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUserNameWithHost.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUseQuery.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserWithElement.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/queryToString.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseUserName.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/QueryWithOutputSettingsPushDownVisitor.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseQuery.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseIdentifierOrStringLiteral.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/AvroRowInputFormat.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/TokenIterator.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TabSeparatedRowInputFormat.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CHColumnToArrowColumn.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CSVRowInputFormat.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONAsStringRowInputFormat.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONEachRowRowOutputFormat.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONEachRowRowInputFormat.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TSKVRowInputFormat.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ParquetBlockOutputFormat.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseIntervalKind.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/DoubleConverter.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/RawBLOBRowInputFormat.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/extractTimeZoneFromFunctionArguments.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TabSeparatedRowOutputFormat.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/IFunction.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/CompressionMethod.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionHelpers.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/LimitTransform.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ResizeProcessor.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Port.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISource.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionFactory.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/AsynchronousReadBufferFromFileDescriptor.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISimpleTransform.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/toFixedString.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/IAccumulatingTransform.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/SettingsFields.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/BaseSettings.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/clickhouse_client_udf.cpp |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/NamesAndTypes.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Field.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/SettingsEnums.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomGeo.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/NativeBlockInputStream.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/IBlockInputStream.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/materializeBlock.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDateTime.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomIPv4AndIPv6.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/SizeLimits.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDate32.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomSimpleAggregateFunction.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDate.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeInterval.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/NestedUtils.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDecimalBase.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNothing.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Settings.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationFixedString.cpp |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationIP.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFixedString.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDate.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeEnum.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDate32.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFactory.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDateTime.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/getMultipleKeysFromConfig.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeUUID.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeLowCardinality.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/parseAddress.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/isLocalAddress.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/hex.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFunction.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/DateLUT.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecMultiple.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getThreadId.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorToString.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecLZ4.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorWriteBinary.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBufferFromFile.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/quoteString.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/sleep.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/mremap.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getFQDNOrHostName.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/errnoToString.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getPageSize.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/IPv6ToBinary.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getResource.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/shift10.cpp |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/preciseExp10.cpp |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/libsrc-client-federated_topic.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/driver/libsrc-client-driver.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/libsrc-client-common_client.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/export/libsrc-client-export.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/discovery/libsrc-client-discovery.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/coordination/libsrc-client-coordination.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.global.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |49.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/util/libcms-console-util.a |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionsConversion.cpp |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/discovery/libydb-core-discovery.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/common/libformats-arrow-common.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/libydb-core-erasure.a |49.8%| PREPARE $(FLAKE8_PY3-715603131) |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson/libcpp-yt-yson.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |50.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/system/libcpp-yt-system.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/threading/libcpp-yt-threading.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/misc/libcpp-yt-misc.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/draft/libsrc-client-draft.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a |50.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/datastreams/libsrc-client-datastreams.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/atomic/libcpp-threading-atomic.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/old_crc/libcpp-digest-old_crc.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/config/libcpp-messagebus-config.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/md5/libcpp-digest-md5.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/CastOverloadResolver.cpp |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/json/libmonlib-encode-json.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/metrics/libcpp-monlib-metrics.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/murmur/libcpp-digest-murmur.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/libcpp-digest-argonish.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/lower_case/libcpp-digest-lower_case.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a |50.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_36807918bd7a86c1ea37310c9c.o |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/actor/libmessagebus_actor.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_vector/libcpp-containers-stack_vector.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/protobuf/libmessagebus_protobuf.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lz4/libblockcodecs-codecs-lz4.global.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.a |51.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/http-parser/libcontrib-restricted-http-parser.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/fastlz/libblockcodecs-codecs-fastlz.global.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/liblibrary-cpp-messagebus.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/mime/types/libcpp-mime-types.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bit_io/liblibrary-cpp-bit_io.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/brotli/libblockcodecs-codecs-brotli.global.a |51.4%| PREPARE $(TEST_TOOL_HOST-sbr:8444524403) |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/lite/libcpp-charset-lite.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/exception/libcpp-monlib-exception.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/libcpp-monlib-encode.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/spack/libmonlib-encode-spack.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yajl/libcontrib-libs-yajl.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a |51.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRPrinter/libllvm16-lib-IRPrinter.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/libllvm16-lib-ExecutionEngine.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/common/libcpp-testing-common.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/liburing/libcontrib-libs-liburing.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/text/libmonlib-encode-text.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libidn/static/liblibs-libidn-static.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libaio/static/liblibs-libaio-static.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/AsmParser/libllvm16-lib-AsmParser.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/crcutil/libcontrib-libs-crcutil.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/skip_list/libcpp-threading-skip_list.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4_cpp_runtime/libcontrib-libs-antlr4_cpp_runtime.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ProfileData/libllvm16-lib-ProfileData.a |52.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.global.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/future/libcpp-threading-future.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.global.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/equeue/libcpp-threading-equeue.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libpy3core-protos-schemeshard.global.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libxml/libcontrib-libs-libxml.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/plugins/libpy3python-pytest-plugins.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.global.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.global.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_core2/liblibs-hyperscan-runtime_core2.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/types/libabseil-cpp-absl-types.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/llhttp/libcontrib-restricted-llhttp.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.global.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/base/libabseil-cpp-absl-base.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/log/libabseil-cpp-absl-log.a |53.6%| 
[AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wheel/libpy3contrib-python-wheel.global.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/liblibrary-cpp-type_info.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/flags/libabseil-cpp-absl-flags.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc/liblibs-apache-orc.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pure-eval/libpy3contrib-python-pure-eval.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ptyprocess/py3/libpy3python-ptyprocess-py3.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/libcore-backup-common.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/liblibrary-cpp-yson.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |53.5%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |53.6%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pexpect/py3/libpy3python-pexpect-py3.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/parso/py3/libpy3python-parso-py3.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.functools/py3/libpy3python-jaraco.functools-py3.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/asttokens/libpy3contrib-python-asttokens.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cancellation/libcpp-threading-cancellation.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/node/libcpp-yson-node.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.global.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a |53.6%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |53.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/yt/malloc/libcpp-yt-malloc.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/exception/libcpp-yt-exception.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.global.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.global.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.global.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IR/libllvm16-lib-IR.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protobuf/libpy3protobuf-builtin_proto-protos_from_protobuf.global.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.global.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang18-rt/lib/asan_cxx/libclang_rt.asan_cxx-x86_64.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang18-rt/lib/asan_static/libclang_rt.asan_static-x86_64.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/util/draft/libutil-draft.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/expat/libcontrib-libs-expat.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/executing/libpy3contrib-python-executing.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/decorator/py3/libpy3python-decorator-py3.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipdb/py3/libpy3python-ipdb-py3.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/version/libversion_definition.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib-inline/libpy3contrib-python-matplotlib-inline.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson_string/libcpp-yt-yson_string.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a 
|53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzma/libcontrib-libs-lzma.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typeguard/libpy3contrib-python-typeguard.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typing-extensions/py3/libpy3python-typing-extensions-py3.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/proto/libbackup-common-proto.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wcwidth/py3/libpy3python-wcwidth-py3.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.global.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/hash/libabseil-cpp-tstring-y_absl-hash.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sqlite3/libcontrib-libs-sqlite3.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/numeric/libabseil-cpp-tstring-y_absl-numeric.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |53.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/random/libabseil-cpp-tstring-y_absl-random.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/libabseil-cpp-tstring-y_absl-synchronization.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.collections/libpy3contrib-python-jaraco.collections.global.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/hash/libabseil-cpp-absl-hash.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/libydb-core-base.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/time/libabseil-cpp-tstring-y_absl-time.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/profiling/libabseil-cpp-absl-profiling.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/system/libsystem_allocator.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |54.0%| {BAZEL_DOWNLOAD} $(B)/library/cpp/sanitizer/plugin/sanitizer.py.pyplugin |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.global.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/synchronization/libabseil-cpp-absl-synchronization.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/time/libabseil-cpp-absl-time.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |54.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/libcore-control-lib.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_common/libpy3python-testing-yatest_common.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/libydb-core-control.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/version/libversion.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/init/libcore-config-init.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.global.a |54.3%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/filestore/core/libcore-filestore-core.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libpy3api-service-protos.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libapi-service-protos.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libpy3api-grpc-draft.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.global.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/walle/libpy3tools-cfg-walle.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libpy3api-protos.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.global.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.global.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/libpy3ydbd_slice.global.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/run/librun.a |54.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/container/libabseil-cpp-absl-container.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/common/liblibs-brotli-common.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/dec/liblibs-brotli-dec.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/libcontrib-libs-cctz.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/libcontrib-libs-farmhash.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/libcontrib-tools-python3.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fastlz/libcontrib-libs-fastlz.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitstream/Reader/liblib-Bitstream-Reader.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam_private/libsrc-client-iam_private.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/BinaryFormat/libllvm16-lib-BinaryFormat.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |54.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libfyaml/libcontrib-libs-libfyaml.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/curl/libcontrib-libs-curl.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Reader/liblib-Bitcode-Reader.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/grpc_connections/libimpl-ydb_internal-grpc_connections.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Demangle/libllvm16-lib-Demangle.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Linker/libllvm16-lib-Linker.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libapi-grpc-draft.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/impl/libclient-federated_topic-impl.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/PDB/liblib-DebugInfo-PDB.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam/libsrc-client-iam.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Remarks/libllvm16-lib-Remarks.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TextAPI/libllvm16-lib-TextAPI.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TargetParser/libllvm16-lib-TargetParser.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/libfq-libs-metrics.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_endpoints/libclient-impl-ydb_endpoints.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |54.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/db_driver_state/libimpl-ydb_internal-db_driver_state.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lz4/libcontrib-libs-lz4.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snappy/libcontrib-libs-snappy.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/libcontrib-libs-ngtcp2.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/random/libabseil-cpp-absl-random.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/libllvm16-lib-MC.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Util/liblibs-poco-Util.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libcontrib-libs-openldap.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/ucontext_impl/libboost-context-ucontext_impl.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/value_helpers/libimpl-ydb_internal-value_helpers.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/retry/libimpl-ydb_internal-retry.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_stats/libclient-impl-ydb_stats.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/plain_status/libimpl-ydb_internal-plain_status.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/logger/libimpl-ydb_internal-logger.a |54.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Object/libllvm16-lib-Object.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/dragonbox/libdragonbox.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/legacy_zstd06/libblockcodecs-codecs-legacy_zstd06.global.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/snappy/libblockcodecs-codecs-snappy.global.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lzma/libblockcodecs-codecs-lzma.global.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zlib/libblockcodecs-codecs-zlib.global.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cache/liblibrary-cpp-cache.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cgiparam/liblibrary-cpp-cgiparam.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/thread_pool/libimpl-ydb_internal-thread_pool.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Support/libllvm16-lib-Support.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/liblibrary-cpp-charset.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/bitseq/libcpp-containers-bitseq.a |54.9%| 
[AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/disjoint_interval_tree/libcpp-containers-disjoint_interval_tree.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/comptrie/libcpp-containers-comptrie.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_rb_tree/libcpp-containers-intrusive_rb_tree.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_array/libcpp-containers-stack_array.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/config/liblibrary-cpp-config.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/listener/libcpp-coroutine-listener.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/sorted_vector/libcpp-containers-sorted_vector.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/str_map/libcpp-containers-str_map.a |55.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_0ab925f82bbba07bf3b749dc3c.o |54.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/01e1cebcd98e239de10ed70b94_raw.auxcpp |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/common/librow_dispatcher-format_handler-common.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/session_pool/libimpl-ydb_internal-session_pool.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/libcore-graph-protos.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/librow_dispatcher-format_handler-filters.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/proto/libsrc-client-proto.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/include/libclient-persqueue_public-include.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/oauth2_token_exchange/libtypes-credentials-oauth2_token_exchange.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/service/libcore-graph-service.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/libgraph-shard-protos.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/rate_limiter/libsrc-client-rate_limiter.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/login/libtypes-credentials-login.a |55.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/scheme/libio_formats-arrow-scheme.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/libcore-keyvalue-protos.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/buffer/libkqp-common-buffer.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/batch/libkqp-common-batch.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/query_stats/libclient-table-query_stats.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/ss_tasks/libsrc-client-ss_tasks.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/common/libclient-topic-common.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/result/libsrc-client-result.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/codecs/libclient-topic-codecs.global.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/common/libimpl-ydb_internal-common.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/lib/libpy3tests-olap-lib.global.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/liblib-Target-X86.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/scheme/libsrc-client-scheme.a |55.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_488333b1ebd4c1d6d8ec5bcb8f.o |55.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |55.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_0665be2c60952715f39eb25568.o |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Analysis/libllvm16-lib-Analysis.a |55.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_1555e67a3dd43a3e7f09bf8eee.o |55.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_1a397c908c9859dc40a771ddf1.o |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/libsrc-client-types.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/status/libclient-types-status.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.global.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/helpers/libpy3olap-scenario-helpers.global.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/impl/libclient-query-impl.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Scalar/liblib-Transforms-Scalar.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/uuid/libsrc-library-uuid.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/obfuscate/libsdk-library-persqueue-obfuscate-v3.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/jwt/libsrc-library-jwt.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/issue/libsrc-library-issue.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/grpc/client/libsdk-library-grpc-client-v3.a |55.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/impl/libclient-topic-impl.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/liblibrary-operation_id-protos.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/libsrc-library-operation_id.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/codecs/libcore-persqueue-codecs.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/config/libcore-persqueue-config.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/string_utils/helpers/liblibrary-string_utils-helpers.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/partition_key_range/libcore-persqueue-partition_key_range.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/topic_parser_public/libsdk-library-persqueue-topic_parser_public-v3.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/out/libcore-protos-out.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/purecalc/libcore-persqueue-purecalc.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/libcore-pgproxy-protos.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/public/libcore-quoter-public.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/raw_socket/libydb-core-raw_socket.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/libydb-core-mon.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/libsrc-client-table.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |55.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/services/auth/libydb-services-auth.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/libydb-core-scheme.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/config/libydb-services-config.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme_types/libydb-core-scheme_types.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/decimal/libsrc-library-decimal.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/libydb-core-security.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.global.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tracing/libydb-core-tracing.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |55.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/libydb-core-mind.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a |55.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/libllvm16-lib-CodeGen.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/replication/libydb-services-replication.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/chunks/libreader-sys_view-chunks.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |55.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/libreader-sys_view-abstract.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/libydb-core-testlib.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/constructor/libreader-sys_view-constructor.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/granules/libreader-sys_view-granules.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/optimizer/libreader-sys_view-optimizer.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/portions/libreader-sys_view-portions.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/common/libengines-scheme-common.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet/libydb-core-tablet.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |55.8%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/insert_table/libcolumnshard-normalizer-insert_table.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/slices/libsbuckets-logic-slices.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/constructor/liboptimizer-sbuckets-constructor.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/common/liboptimizer-sbuckets-common.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/libyql-essentials-ast.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/one_head/libsbuckets-logic-one_head.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/counters/liboptimizer-sbuckets-counters.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/optimizer/liboptimizer-sbuckets-optimizer.global.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/index/liboptimizer-sbuckets-index.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/tier/libsession-storage-tier.global.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/abstract/libsession-storage-abstract.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/service/libtx-limiter-service.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |55.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/abstract/libsbuckets-logic-abstract.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |55.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/public/libtx-coordinator-public.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/libydb-core-tx.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/common/libtx-data_events-common.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |55.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/usage/libtx-limiter-usage.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/itsdangerous/py3/libpy3python-itsdangerous-py3.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/common/libpy3tests-olap-common.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |55.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/user_attributes.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_xxport__helpers.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__unmark_restore_tables.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp |55.3%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_billing_helpers.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log_fragment.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__root_data_erasure_manager.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__tenant_data_erasure_manager.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_effective_acl.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_identificators.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_element.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_uploaders.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_getters.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_scheme_query_executor.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_self_pinger.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_types.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_shard_deleter.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_types.h_serialized.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_utils.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__data_erasure_manager.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__list_users.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cansel_build_index.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_incremental_backup_collection.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/etcd_proxy/etcd_proxy |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/events/liblibs-test_connection-events.a |56.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_f8b2cbafb1fed0e25bf9683c2d.o |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/insert_table/libcolumnshard-engines-insert_table.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/colorama/py3/libpy3python-colorama-py3.global.a |56.3%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/click/py3/libpy3python-click-py3.global.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/libydb-core-resource_pools.a |56.4%| PREPARE $(WITH_JDK-sbr:7832760150) |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |56.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_e31620202d3ba8df14ff2a18e1.o |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_caching/libydb-core-grpc_caching.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extension_common/libsrc-client-extension_common.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |56.4%| 
PREPARE $(JDK17-472926544) |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.global.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/fq/libpublic-lib-fq.a |56.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/jaraco.text/libpy3contrib-python-jaraco.text.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/table/libarrow-csv-table.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jedi/py3/libpy3python-jedi-py3.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/platformdirs/libpy3contrib-python-platformdirs.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/more-itertools/py3/libpy3python-more-itertools-py3.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.global.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/load_test/libydb-core-load_test.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/py/py3/libpy3python-py-py3.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.global.a |56.4%| PREPARE $(WITH_JDK17-sbr:7832760150) |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/evlog/libcore-util-evlog.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/util/liblibrary-actors-util.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/libpy3contrib-python-websocket-client.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/traitlets/py3/libpy3python-traitlets-py3.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/prompt-toolkit/py3/libpy3python-prompt-toolkit-py3.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |56.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/container/libabseil-cpp-tstring-y_absl-container.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/base/libabseil-cpp-tstring-y_absl-base.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/libabseil-cpp-tstring-y_absl-debugging.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/libabseil-cpp-tstring-y_absl-flags.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/profiling/libabseil-cpp-tstring-y_absl-profiling.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/log/libabseil-cpp-tstring-y_absl-log.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/status/libabseil-cpp-tstring-y_absl-status.a |56.6%| PREPARE $(JDK_DEFAULT-472926544) |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/libydb-core-util.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/types/libabseil-cpp-tstring-y_absl-types.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/numeric/libabseil-cpp-absl-numeric.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/find_root/libpy3library-python-find_root.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/common/libymq-queues-common.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/actor_type/liblibrary-actors-actor_type.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/libabseil-cpp-tstring-y_absl-strings.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/accessor/libydb-library-accessor.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/liblibrary-actors-testlib.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/liblibrary-aclib-protos.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/libydb-library-aclib.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/common/libactors-testlib-common.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/log_backend/liblibrary-actors-log_backend.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/prof/liblibrary-actors-prof.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/main/libpython-runtime_py3-main.a |56.7%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/strings/libabseil-cpp-absl-strings.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/memory_log/liblibrary-actors-memory_log.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/filter/libpy3python-testing-filter.global.a |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |56.6%| PREPARE $(CLANG-2518231432) |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpy3cpython-symbols-python.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/libpy3library-python-pytest.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.global.a |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/fyamlcpp/libydb-library-fyamlcpp.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libpy3core-scheme-protos.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_parquet/libydb-library-arrow_parquet.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/libydb-core-wrappers.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/chunks_limiter/libydb-library-chunks_limiter.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/harmonizer/libactors-core-harmonizer.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/conclusion/libydb-library-conclusion.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/liblibrary-db_pool-protos.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/wilson/liblibrary-actors-wilson.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/libydb-library-db_pool.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a 
|56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/liblibrary-actors-core.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a |56.8%| [BI] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/buildinfo_data.h |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/libydb-library-folder_service.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/mock/liblibrary-folder_service-mock.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/converter/libarrow-csv-converter.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.global.a |56.9%| PREPARE $(CLANG18-3363451693) |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/liblibrary-actors-interconnect.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a |56.9%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/actors/libgrpc-server-actors.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a |56.8%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svn_interface.c |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/liblibrary-grpc-server.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_kernels/libydb-library-arrow_kernels.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libpy3api-grpc.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libpy3providers-s3-proto.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/s3/proto/libproviders-s3-proto.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/account_lockout/liblibrary-login-account_lockout.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/k8s_api/libpy3tools-cfg-k8s_api.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/logger/libydb-library-logger.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/datastreams_helpers/libpy3tests-tools-datastreams_helpers.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/error/liblibrary-http_proxy-error.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/password_checker/liblibrary-login-password_checker.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/naming_conventions/libydb-library-naming_conventions.a |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/libydb-library-mkql_proto.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/backtrace/libcontrib-libs-backtrace.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/libydb-library-login.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/credentials/libproviders-s3-credentials.a |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.global.a |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/libydb-library-pdisk_io.a |56.9%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svnversion.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signals/libydb-library-signals.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/input_transforms/libdq-actors-input_transforms.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/recipes/common/libpy3library-recipes-common.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a |57.0%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/build_info/build_info_static.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/protobuf_printer/libydb-library-protobuf_printer.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/tzdata/liblibs-cctz-tzdata.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/libpy3ydb-tools-cfg.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.a |57.0%| [AR] {BAZEL_DOWNLOAD} 
[... progress 57.0% → 59.2%: several hundred successful [AR]/[CC]/[LD]/[PY]/[PB] {BAZEL_DOWNLOAD} build actions across ydb/, yt/, yql/, library/cpp/ and contrib/ targets elided; the PREPARE steps and every FAILED action from this span are kept below, one per line, in original order ...]
|57.1%| PREPARE $(GDB)
|57.1%| PREPARE $(CLANG-1922233694)
|57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut
|57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp
|57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3
|58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut
|58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut
|58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut
|58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut
|58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp
|58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence
|58.7%| PREPARE $(CLANG16-1380963495)
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write
|59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut
|59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk
|59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut
|59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer
|59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut
|59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction
|59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe
|59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel
|59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut
|59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql
|59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats
|59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests
|59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests
|59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut
|59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap
|59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots
|59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut
|59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut
|59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut
|59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut
|59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction
|59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb
|59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage
|59.1%| [LD] {BAZEL_DOWNLOAD, FAILED}
$(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_5294a064c14cf5a49516321590.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_71d73932c95681fccfc7215041.o |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.a |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_c7c229be41e9b028572ad1aab3.o |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/docker/libpy3contrib-python-docker.global.a |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource.cpp |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_1ab2a5a6dd84a6c9ff5d5c50b0.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_3bdea7737a87c43bfaa0aaf4c3.o |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_f42b1add98328abd34a53e4aef.o |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_factory.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_output.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_condense.cpp |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_023a23fcfdf79043d814bb8aab.o |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_45b6981aed17dda33d43217f52.o |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_9818d2b70aad7db98a0f9c044c.o |59.1%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/fq/yds/e1ff312a3308444783623a7c6e_raw.auxcpp |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_496e4638abf3c5ef12eafab52c.o |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_6b8c453743f8fd2c5380af70c6.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_f363a941fa24746cadffc60594.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_696078ddd4c2d0788472b3ebfe.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_b08299d456f3448b368e814cb8.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_8f2fbd9f79880fbfa3c1838d80.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_0e928e66807fd553d7fcaa58a3.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_5dc9c76fd90ae0562084321e87.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_52476c20dac0af4f59edc2917e.o |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/tools/sql2yql/sql2yql |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/check/libv1-lexer-check.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/regex/libv1-lexer-regex.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_weakmember.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/essentials/tools/sql2yql/sql2yql.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_hopping.cpp |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_0aefef587c181350d3a25f70e0.o |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_c068ee86eb127df13256bfbe45.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_7c328c2741f9dd7697a2e0e8b1.o |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_exec.cpp |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_4c839b0fc6ee60e0bb4adc7079.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_994fcbd53c4e2174c302bdb5ab.o |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_6f577a0a3d7a659599df51626e.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_49bad8251d240ad7c49d384b91.o |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_181bdcd1743e9a1a78fafe4b60.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_703c8e1d9a9a2b271b8b995a29.o |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/lib/libpy3olap-load-lib.global.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/e294827eb799173498fe26d398_raw.auxcpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_b90d4f51856a4b346b62877f37.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_ef822f612b696eb514a5565056.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_912038ceef7de48e0e15c25307.o |59.1%| [LD] {BAZEL_DOWNLOAD, 
FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_452efd8b0828678a61ff4e0569.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_2cc418e8604751e5b8f9029a81.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_817637e20254f271e9be62d14e.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_ddabc037fdef8d5f7c6c2f9b47.o |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/lib/libpy3functional-tpc-lib.global.a |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_3e7b0e88092417daa72b89bfde.o |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/types/libpy3tests-utils-types.global.a |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_1574e8a5a6c530c7bfd6378c4d.o |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/utils/libpy3fq-generic-utils.global.a |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_bac05c8b5a79735451f58d9322.o |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/libpy3connector-tests-utils.global.a |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_504b845d57f1a23561e970de61.o |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_backup.pb.{h, cc} |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_grace_join.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h} |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_skiptake.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/security_client.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_container.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_unwrap.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_map_join.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_178e64ce5db822fc6aa8b3e608.o |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... grpc.pb.h} |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_a54664d42025a3be375f961b82.o |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.{pb.h ... grpc.pb.h} |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_0a1f127d9343562caddfbacf79.o |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... 
grpc.pb.h} |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_79d897640a3a634a87f173e2f4.o |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/libpy3yt-python-yt.global.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/type_info/libpy3python-yt-type_info.global.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/yson/libpy3python-yt-yson.global.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/7179c606fb7373cb8f04d9971a_raw.auxcpp |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_6a5c78aa9f679a0920be5264fe.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_d0255dda539959b69d421868a2.o |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/ydb-tests-olap |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_173de88696c8239b22567e7ece.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_96b8686cd075e874d95d4aa5c5.o |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_type_ann.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_constraints.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/32049c3ef1f885f0e34984b3bf_raw.auxcpp |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_363bd76b3bc04955e01c402ffc.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_c43ce24509a50b033fa4050a33.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_133cf22e149f008572dab30674.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_51000f45ee1f1ab0908a7e71c9.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_f05ead59375a9db120b95dd730.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/objcopy_862077c20b8d599559a861c2df.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/objcopy_a6e393b6d53f4c73feac80b55c.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_6cfba3dbee97ec121b2f346459.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_e9b644ce7912a7deb04bbf40a8.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_1aeeb50f676472f975830c135d.o |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/e01aded916ad04e888f13223cf_raw.auxcpp |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_903d4758faea71f1363e296b3f.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_c77713875cf17988efd8fc0fb3.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_dad83df4ab6e900f5632eadf2b.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_8d06c552fa5830dcf8274026f0.o |59.2%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/fq/s3/objcopy_ce073e3cc612363936bdd04210.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_7e7e709046fe8acad91d924675.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/objcopy_4b2e093abff756c97b675c0a31.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_1d56c9a5fd80acfc9c28483f06.o |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/compression.cpp |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.{pb.h ... grpc.pb.h} |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... grpc.pb.h} |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_type_ann.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_exec.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_forwarding_gateway.cpp |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.{pb.h ... grpc.pb.h} |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_0a29eb8c456ab5b998f2d12ba1.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_9ea5b1fb7a4f8e1b0b8d7cf345.o |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... 
grpc.pb.h} |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_hybrid.cpp |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_34efc91ed920a8b27d971c44a6.o |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ydb |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_main/libcpp-testing-gtest_main.a |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_b9fd5c62781ec3b78d111a0ba7.o |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest/libcpp-testing-gtest.a |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_a5874452d3dbd6f6e49cd08be6.o |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_277b7e8f79021687bec95be8db.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_0359848ae21601186c5b0d9873.o |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_88a0e187d0d0f8235a5e3f2fff.o |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/7fdc9492198d5f306aa05e0de1_raw.auxcpp |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_3ddbad334a37a829b3772ddb05.o |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/libpy3tests-postgres_integrations-library.global.a |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_2b60b599fc27771d93e79090fc.o |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_4352b8b3e3cf61532c865b371b.o |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/journal_client.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_lambda.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_if.cpp |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.{pb.h ... grpc.pb.h} |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/private.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/query_tracker_client.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... grpc.pb.h} |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/unistat/libmonlib-encode-unistat.a |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... 
grpc.pb.h} |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/testing/group_overseer/libblobstorage-testing-group_overseer.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... grpc.pb.h} |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_compression.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/main.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql_compile_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_proxy.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pgwire.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_connection.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/export.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/main.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_d009f62008041e2f09cdbf7def.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_a388edbfdbfbac232101c7f510.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_e5d897582dc0fbda7c578cb53f.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_b06d27009e49b9ba3df883a226.o |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/library/libpy3tools-nemesis-library.global.a |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_41295709119857c2e0f1a41f31.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_c98e5b95c64b8486a12f10d408.o |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_6508d12aaafde6f0a60fe8fff3.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_96d3f689fe7d6b16f9da31e376.o |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/ut_helpers/liblibs-quota_manager-ut_helpers.a |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_cd9abca883cad9b25e20bf2f08.o |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |59.0%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/bind_queue_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_contains.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_cbo_helpers.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... grpc.pb.h} |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_io_utils.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_flow.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_func.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.{pb.h ... grpc.pb.h} |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_finalize.cpp |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... 
grpc.pb.h} |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ru_calculator/ut_ru_calculator.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/queue_id_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/params_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_clock_pro_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/fetch_request_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/list_all_topics_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/sha256_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/infly_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/ymq/actor/ut/attributes_md5_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/metering_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/message_delay_stats_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/cache_eviction_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/resource_broker_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/objcopy_1d0482d354dc270d18e7123281.o |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_2de2accab39327e9b10680901f.o |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ticket_parser_ut.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/workload/libpy3stress-simple_queue-workload.global.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_metrics_ut.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/query_actor/query_actor_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp 
|58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/metering_sink_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/make_config.cpp |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_2492aafb6862566a2398c9f27e.o |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_3df021aac8504049c53286aea0.o |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/microseconds_sliding_window_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_update_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_delete_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ut_data_erasure_reboots.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/config/bsconfig_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp 
|58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/partition_end_watcher_ut.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/service_mocks/ldap_mock/libtestlib-service_mocks-ldap_mock.a |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/quota_tracker_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/utils_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/type_codecs_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/table/libtest-libs-table.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_cxx_database_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_s3fifo_ut.cpp |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/objcopy_9f29b589555ed64086e5eadccf.o |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_iter_charge.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_switchable_ut.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/breakpad/libydb-library-breakpad.global.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_bloom.cpp |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_self.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_charge.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_nodes.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_iface.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_data_cleanup.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_comp_gen.cpp 
[... progress 58.2%–59.0%: successful [CC]/[AR]/[PB]/[PY]/[LD] {BAZEL_DOWNLOAD} entries elided; the {BAZEL_DOWNLOAD, FAILED} entries from this span of the log are kept below, one per line, in order of appearance ...]
|58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat
|58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut
|58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp
|58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut
|58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/pgwire/pgwire
|58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/config/ut/ydb-services-config-ut
|58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut
|58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage
|58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut
|58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun
|58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut
|58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut
|58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load
|58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut
|58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots
|58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut
|58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut
|58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut
|58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats
|58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut
|58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard
|58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut
|58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber
|58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move
|58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub
|58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots
|58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops
|58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow
|58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots
|58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut
|58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut
|58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut
|58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange
|58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut
|58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export
|58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow
|58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat
|58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity
|58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots
|58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut
|58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut
|58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation
|58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut
|58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut
|58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large
|58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut
|58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/ydb-core-control-ut
|58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows
|58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test
|58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable
|58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign
|58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut
|58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base
|58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut
|58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat
|58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots
|58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots
|58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut
|58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless
|58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots
|58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build
|58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test
|58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication
|58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme
|58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fqrun/fqrun
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq
|58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut
|58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut
|58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw
|58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/ydb-core-security-ut
|58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs
|58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects
|58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes
|58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots
|58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable
|58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut
|58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut
|58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut
|58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/load/ydb-tests-olap-load
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order
|59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup
|59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector
|59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility
|59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut
|59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large
|59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain
|59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build
|59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant
|59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut
|59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics
|59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3
|59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg
|59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql
|58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace
|59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas
grpc.pb.h} |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/yson_builder.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/comparator.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_ifpresent.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_rows_formatter.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_combine.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_merge.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... grpc.pb.h} |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/data_statistics.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_discovery.pb.{h, cc} |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/merge_complex_types.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc} |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_table.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_5b5c3367c789898aa5a6cae866.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_8e57113197bb359e3999b04aab.o |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... 
grpc.pb.h} |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_a40d299361b06d7622f78b2238.o |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gbenchmark/libcpp-testing-gbenchmark.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_86ad37399122e504f3e6d8378d.o |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/flavours/libpy3tests-library-flavours.global.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/locks_ut.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_5a4a401f33f46c70417a65f584.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_e3640190fc6b98b359c2a9e990.o |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/flat_ut.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_0c4ce75555cfd5c0dd63e9dfbd.o |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_0b6bc206b470900b0b94249ade.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_6e0da74b1512d0ffe19c5dc500.o |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/object_storage_listing_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/4129dc9878c2058404494fb088_raw.auxcpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_c84c8d511807425dc18073129b.o |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/cancel_tx_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_helpers.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/public.cpp |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.{pb.h ... 
grpc.pb.h} |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |58.8%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.h |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_7c81cbfa6b5ce112674cb0a849.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_79fff48e52404c1611400b8a2c.o |58.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/0dff0b13f2d02975a4a973a1e8_raw.auxcpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/b9d4e191a9fd03221b46c5af49_raw.auxcpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_bf578b7161cc94bf18488d04ca.o |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/stop_pdisk.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_55f2556d6eafcd77ebc4c517d4.o |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_f928a40774b17a9d6cd7cabd2c.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_e7477203b27fa0321cf18fd7ee.o |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_e2acb41e7099c0db4fe54a1587.o |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |58.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_discard.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_2b682e146a665bfa19210b0fd9.o |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/storagepoolmon/ut/storagepoolmon_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_proto_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_host_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/kikimr_program_builder_ut.cpp |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... 
grpc.pb.h} |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_5525925030ba2866c1b1040841.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_c623700776b43ee95ec93c56f9.o |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/objcopy_dcbdf62672440a626e79a64e14.o |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/example/ydb-tests-example |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncquorum_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |66.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record_ut.cpp |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |65.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |66.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |66.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |66.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |66.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |66.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |66.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |66.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_4cf502b19212965f14d6660a20.o |66.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/objcopy_484246668d943fbae3b476ec7d.o |66.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_8e19d47784789c55156c57f816.o |66.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/inside_ydb_ut/inside_ydb_ut.cpp |66.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |66.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/iceberg_processor_ut.cpp |66.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |66.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |66.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |66.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |66.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/fq/libs/common/util_ut.cpp |66.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |66.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |66.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |66.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |66.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |66.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |65.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_065e9244d685c2b8f0ab66e414.o |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |65.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_461999da7ba13deab5689c18ec.o |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |65.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |65.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |65.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp |65.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |65.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_9be8b6745d0fa150928bab4206.o |65.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_8649dacdf340abe7c53df69638.o |65.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/workload/libpy3stress-olap_workload-workload.global.a |65.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |65.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/libgrpc_streaming-ut-grpc.a |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |65.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |65.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer_ut.cpp |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_pool.cpp |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_local.cpp |65.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |65.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |65.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/board_subscriber_ut.cpp |65.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |65.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/node_broker_ut.cpp |65.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |65.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |65.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |65.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp 
|65.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |65.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |65.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_labeled.cpp |65.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_counters.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_kqp.cpp |65.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |65.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |65.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |65.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |65.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |65.4%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/core/libyt-yt-core.a |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression_ut.cpp |65.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |65.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert_ut.cpp |65.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/idx_test/libpublic-lib-idx_test.a |65.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |65.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |65.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |65.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |65.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |65.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_ut.cpp |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_spilling_ut.cpp |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |65.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_compute_scheduler_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_logging_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |65.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/ut/ut_utils.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/topic_data_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/viewer_ut.cpp |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |65.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/pq_read |65.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/ut/helpers/libmkql_proto-ut-helpers.a |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp |65.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |65.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |65.5%| [AR] {default-linux-x86_64, release, asan} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |65.6%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |65.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp |65.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |65.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |65.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/json_ut.cpp |65.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |65.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |65.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |65.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |65.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_data_cleanup.cpp |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |65.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |66.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |66.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |66.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |66.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |68.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |68.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_reshuffle_kmeans.cpp |68.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_sample_k.cpp |68.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_secondary_index.cpp |68.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_prefix_kmeans.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_local_kmeans.cpp |69.6%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |70.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |72.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |72.7%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |72.9%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |73.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |73.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |73.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |73.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/datastreams_ut.cpp |73.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |73.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |73.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |73.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |73.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |73.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |73.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |73.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |73.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |73.1%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |73.1%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |73.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |73.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |73.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |73.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |73.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp |73.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |73.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |73.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |73.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |73.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |73.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |73.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |73.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |73.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |73.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |73.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |73.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |73.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |73.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |73.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |73.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |73.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |73.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |73.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/test_import/libtest_import_udf.so |72.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/ydb-core-client-ut |72.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |72.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |72.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/functions_executor_wrapper.cpp |72.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |72.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |72.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp |72.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |72.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |72.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |72.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |72.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |72.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |72.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |72.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |72.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |72.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |72.8%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/client/libyt-yt-client.a |72.8%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |72.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |72.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |72.9%| [LD] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tools/sql2yql/sql2yql |72.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql |72.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |72.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |73.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/main.cpp |73.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |73.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |73.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_replay.cpp |73.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_proccessor.cpp |73.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |73.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |73.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |73.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |73.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_compiler.cpp |73.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |73.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |73.4%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |73.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |73.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |74.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |74.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |75.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |76.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |76.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |77.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |77.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |78.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |78.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |78.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |78.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |78.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |78.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |78.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |78.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |78.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |78.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |78.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |78.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |79.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |79.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |79.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |79.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.so |79.8%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |79.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |80.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |80.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |80.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |80.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |80.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |80.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |80.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay/ydb_query_replay |80.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |80.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp |80.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |80.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |80.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |80.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |80.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |80.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |80.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |80.7%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp |80.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |80.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |80.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |80.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/sdk_sessions_pool_ut/sdk_sessions_pool_ut.cpp |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |80.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |80.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/cms/cms_ut.cpp |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |80.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/network/libessentials-utils-network.a |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |80.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |80.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |80.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |80.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/main.cpp |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |80.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |80.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |80.9%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |80.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |80.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |80.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |80.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |80.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |80.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |80.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/solomon/ydb-tests-fq-solomon |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |80.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a |80.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |80.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |80.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |80.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |80.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |80.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |80.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |80.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |80.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a |80.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a |80.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |80.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |80.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so |80.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |80.2%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |80.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |80.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |80.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator_grpc/solomon_recipe_grpc |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |80.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |79.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/example/ydb-tests-example |79.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/example/ydb-tests-example |79.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.so |79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |78.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |78.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |75.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydb/ydb |75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |75.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |75.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |75.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |75.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |75.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |75.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |75.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |75.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |75.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |75.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |75.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |75.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |75.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |75.5%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |75.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |75.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |75.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.so |75.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so |74.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |74.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |74.0%| PREPARE $(FLAKE8_LINTER-sbr:6561765464) |73.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |73.3%| [ld] {default-linux-x86_64, release, asan} $(B)/tools/flake8_linter/flake8_linter |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/docs/generator/generator |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/docs/generator/generator |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |73.3%| [AR] {RESULT} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |73.3%| [AR] {RESULT} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |73.3%| RESOURCE $(sbr:4966407557) |73.3%| [AR] {RESULT} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |73.3%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |73.3%| PREPARE $(BLACK_LINTER-sbr:8415400280) |73.3%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |73.3%| [UN] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/common-test_framework-udfs_deps.pkg.fake >> test_schemeshard_limits.py::flake8 [GOOD] >> test_config_with_metadata.py::flake8 [GOOD] |73.3%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc >> test_workload.py::flake8 [GOOD] |73.3%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts >> test.py::flake8 [GOOD] >> test_generate_dynamic_config.py::flake8 [GOOD] >> tablet_scheme_tests.py::flake8 [GOOD] |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/library/ut/ydb-tests-library-ut >> base.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] |73.3%| [SB] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |73.3%| [ld] {default-linux-x86_64, release, asan} $(B)/tools/black_linter/black_linter |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml >> test_disk.py::flake8 [GOOD] >> test_tpch_import.py::flake8 [GOOD] >> test_clickhouse.py::flake8 [GOOD] >> test_tablet.py::flake8 [GOOD] >> 
test_quota_exhaustion.py::flake8 [GOOD] |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/sql/ydb-tests-sql |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |73.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/flake8 >> test_schemeshard_limits.py::flake8 [GOOD] >> test_greenplum.py::flake8 [GOOD] |73.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/log/tests/flake8 >> test_workload.py::flake8 [GOOD] |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml |73.3%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests >> test.py::flake8 [GOOD] >> test_log_scenario.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |73.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/config/flake8 >> test_generate_dynamic_config.py::flake8 [GOOD] |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/ydb-tests-sql |73.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part13/flake8 >> test.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] >> zip_bomb.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_mysql.py::flake8 [GOOD] |73.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_tests/flake8 >> tablet_scheme_tests.py::flake8 [GOOD] |73.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/s3_import/flake8 >> test_tpch_import.py::flake8 [GOOD] >> common.py::flake8 [GOOD] |73.3%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |73.3%| [LD] {RESULT} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas >> hive_matchers.py::flake8 [GOOD] >> test_pdisk_format_info.py::flake8 [GOOD] |73.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large >> test.py::flake8 [GOOD] >> test_create_tablets.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> test_postgresql.py::flake8 [GOOD] >> test_drain.py::flake8 [GOOD] >> test_replication.py::flake8 [GOOD] >> alter_compression.py::flake8 [GOOD] >> test_kill_tablets.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_self_heal.py::flake8 [GOOD] >> test_rename.py::flake8 [GOOD] >> test_tablet_channel_migration.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |73.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/flake8 >> test_tablet.py::flake8 [GOOD] |73.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part9/flake8 >> test.py::flake8 [GOOD] |73.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |73.4%| [UN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |73.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/data_quotas/flake8 >> test_quota_exhaustion.py::flake8 [GOOD] |73.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part17/flake8 >> test.py::flake8 [GOOD] |73.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dicts/libdicts_udf.so |73.4%| [TS] {asan, 
default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part2/flake8 >> test.py::flake8 [GOOD] >> base.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_clickbench.py::flake8 [GOOD] >> test_crud.py::flake8 [GOOD] >> test_clickbench.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_ydb.py::flake8 [GOOD] >> test_serverless.py::flake8 [GOOD] |73.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |73.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/flake8 >> zip_bomb.py::flake8 [GOOD] |73.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part5/flake8 >> test.py::flake8 [GOOD] >> test_tpcds.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] >> test_tpcds.py::flake8 [GOOD] >> test_inserts.py::flake8 [GOOD] >> test_kv.py::flake8 [GOOD] >> test_cms_erasure.py::flake8 [GOOD] >> test_diff_processing.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] >> test_cms_restart.py::flake8 [GOOD] >> select_datetime_with_service_name.py::flake8 [GOOD] >> test_restarts.py::flake8 [GOOD] >> test_tpch_spilling.py::flake8 [GOOD] >> test_cms_state_storage.py::flake8 [GOOD] >> overlapping_portions.py::flake8 [GOOD] >> test_postgres.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test_leader_start_inflight.py::flake8 [GOOD] |73.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part18/flake8 >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_sql_streaming.py::flake8 [GOOD] >> select_positive_with_service_name.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] |73.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |73.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/flake8 >> test_tablet_channel_migration.py::flake8 [GOOD] |73.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part11/flake8 >> test.py::flake8 [GOOD] |73.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/flake8 >> test_kill_tablets.py::flake8 [GOOD] |73.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/flake8 >> base.py::flake8 [GOOD] |73.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/flake8 >> test_rename.py::flake8 [GOOD] |73.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.so |73.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part1/flake8 >> test.py::flake8 [GOOD] |73.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.so |73.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dummylog/libdummylog.so >> test.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] |73.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/analytics/flake8 >> test_ydb.py::flake8 [GOOD] |73.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part8/flake8 >> test.py::flake8 [GOOD] |73.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/lists/liblists_udf.so |73.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.so |73.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/flake8 >> test_tpch.py::flake8 [GOOD] |73.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/vector/libvector_udf.so |73.5%| [TS] {asan, default-linux-x86_64, release} 
ydb/tests/sql/flake8 >> test_kv.py::flake8 [GOOD] |73.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/structs/libstructs_udf.so |73.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/large/flake8 >> test_tpch_spilling.py::flake8 [GOOD] |73.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/load/flake8 >> test_tpch.py::flake8 [GOOD] |73.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/flake8 >> test_serverless.py::flake8 [GOOD] >> test_example.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> allure_utils.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> results_processor.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_cli.py::flake8 [GOOD] >> ydb_cluster.py::flake8 [GOOD] |73.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/callables/libcallables_udf.so >> test_base.py::flake8 [GOOD] |73.5%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api >> test_query.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] |73.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.so >> test_s3.py::flake8 [GOOD] |73.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/flake8 >> test_postgres.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_bulkupserts_tpch.py::flake8 [GOOD] >> test_insert_delete_duplicate_records.py::flake8 [GOOD] >> test_insertinto_selectfrom.py::flake8 [GOOD] >> test_tiering.py::flake8 [GOOD] >> test_workload_manager.py::flake8 [GOOD] |73.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/flake8 >> utils.py::flake8 [GOOD] |73.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/flake8 >> test_sql_streaming.py::flake8 [GOOD] |73.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/oom/flake8 >> overlapping_portions.py::flake8 [GOOD] |73.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 >> test.py::flake8 [GOOD] |73.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/recipe/flake8 >> __main__.py::flake8 [GOOD] |73.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/flake8 >> test_restarts.py::flake8 [GOOD] |73.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/large/flake8 >> test_leader_start_inflight.py::flake8 [GOOD] |73.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.so |73.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/type_inspection/libtype_inspection_udf.so |73.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part12/flake8 >> test.py::flake8 [GOOD] |73.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/simple/libsimple_udf.so |73.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/oltp_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |73.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/streaming/flake8 >> test_join.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_auth_system_views.py::flake8 [GOOD] >> test_create_users.py::flake8 [GOOD] >> test_create_users_strict_acl_checks.py::flake8 [GOOD] >> test_db_counters.py::flake8 [GOOD] >> test_dynamic_tenants.py::flake8 [GOOD] >> test_publish_into_schemeboard_with_common_ssring.py::flake8 [GOOD] >> test_common.py::flake8 [GOOD] >> test_storage_config.py::flake8 [GOOD] >> 
test_system_views.py::flake8 [GOOD] >> test_tenants.py::flake8 [GOOD] >> test_user_administration.py::flake8 [GOOD] >> test_users_groups_with_acl.py::flake8 [GOOD] >> test_yandex_cloud_mode.py::flake8 [GOOD] >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |73.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/sql/lib/flake8 >> test_s3.py::flake8 [GOOD] |73.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/serializable/flake8 >> test.py::flake8 [GOOD] |73.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 >> test.py::flake8 [GOOD] |73.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/lib/flake8 >> ydb_cluster.py::flake8 [GOOD] |73.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.so |73.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part19/flake8 >> test.py::flake8 [GOOD] |73.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/example/flake8 >> test_example.py::flake8 [GOOD] |73.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 >> test.py::flake8 [GOOD] |73.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/sql/large/flake8 >> test_workload_manager.py::flake8 [GOOD] >> conftest.py::black [GOOD] >> conftest.py::black [GOOD] >> test_clickhouse.py::black [GOOD] >> test_greenplum.py::black [GOOD] |73.5%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |73.5%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |73.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/flake8 >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |73.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/tenants/flake8 >> test_users_groups_with_acl.py::flake8 [GOOD] |73.2%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |73.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.so |73.2%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption >> test_join.py::black [GOOD] |73.1%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |73.1%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql >> test_join.py::black [GOOD] >> test.py::flake8 [GOOD] |73.1%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |73.1%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |72.9%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |73.0%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |73.0%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |72.9%| [LD] {RESULT} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |72.9%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example |72.9%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |72.9%| [LD] {RESULT} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import >> test_mysql.py::black [GOOD] >> test_workload.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] |72.8%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |72.8%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |72.8%| [LD] {RESULT} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |72.8%| [TS] 
{asan, default-linux-x86_64, release} ydb/tests/fq/generic/streaming/black >> test_join.py::black [GOOD] |72.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.so |72.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.so |72.6%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |72.6%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |72.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.so >> collection.py::flake8 [GOOD] >> test_auditlog.py::flake8 [GOOD] >> test_postgresql.py::black [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |72.6%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |72.6%| [LD] {RESULT} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |72.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 >> test.py::flake8 [GOOD] |72.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.so |72.6%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |72.6%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb >> test_ydb.py::black [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |72.6%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql |72.6%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |72.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/kv/tests/flake8 >> test_workload.py::flake8 [GOOD] |72.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 >> test.py::flake8 [GOOD] |72.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |72.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/analytics/black >> test_ydb.py::black [GOOD] |72.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/flake8 >> test_auditlog.py::flake8 [GOOD] |72.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution >> base.py::flake8 [GOOD] >> kikimr_config.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> data_correctness.py::flake8 [GOOD] >> data_migration_when_alter_ttl.py::flake8 [GOOD] >> tier_delete.py::flake8 [GOOD] >> ttl_delete_s3.py::flake8 [GOOD] >> ttl_unavailable_s3.py::flake8 [GOOD] >> unstable_connection.py::flake8 [GOOD] >> test_encryption.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_alter_compression.py::flake8 [GOOD] >> test_alter_tiering.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] |72.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part3/flake8 >> test.py::flake8 [GOOD] |72.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part10/flake8 >> test.py::flake8 [GOOD] >> test_read_update_write_load.py::flake8 [GOOD] >> test_scheme_load.py::flake8 [GOOD] >> test_crud.py::flake8 [GOOD] >> test_simple.py::flake8 [GOOD] >> test_discovery.py::flake8 [GOOD] |72.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/flake8 >> unstable_connection.py::flake8 [GOOD] |72.4%| [TS] {RESULT} ydb/tests/stress/log/tests/flake8 |72.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part6/flake8 >> 
test.py::flake8 [GOOD] |72.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/library/ut/flake8 >> kikimr_config.py::flake8 [GOOD] >> test_execute_scheme.py::flake8 [GOOD] >> test_indexes.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_isolation.py::flake8 [GOOD] >> test_public_api.py::flake8 [GOOD] >> test_read_table.py::flake8 [GOOD] >> test_session_grace_shutdown.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_session_pool.py::flake8 [GOOD] >> test_2_selects_limit.py::flake8 [GOOD] >> test_3_selects.py::flake8 [GOOD] >> test_bad_syntax.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_big_state.py::flake8 [GOOD] >> test_continue_mode.py::flake8 [GOOD] >> test_cpu_quota.py::flake8 [GOOD] >> test_delete_read_rules_after_abort_by_system.py::flake8 [GOOD] >> test_eval.py::flake8 [GOOD] >> test_invalid_consumer.py::flake8 [GOOD] |72.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part2/flake8 |72.3%| [TS] {RESULT} ydb/tests/fq/generic/streaming/flake8 >> test_kill_pq_bill.py::flake8 [GOOD] >> test_mem_alloc.py::flake8 [GOOD] >> test_metrics_cleanup.py::flake8 [GOOD] >> test_pq_read_write.py::flake8 [GOOD] >> test_kqprun_recipe.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_read_rules_deletion.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_recovery.py::flake8 [GOOD] >> test_recovery_match_recognize.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_recovery_mz.py::flake8 [GOOD] >> test_restart_query.py::flake8 [GOOD] >> test_http_api.py::flake8 [GOOD] >> test_row_dispatcher.py::flake8 [GOOD] |72.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/flake8 >> test_simple.py::flake8 [GOOD] |72.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/encryption/flake8 >> test_encryption.py::flake8 [GOOD] >> test_select_1.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_liveness_wardens.py::flake8 [GOOD] >> test_select_limit.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_ydb_backup.py::flake8 [GOOD] >> test_ydb_flame_graph.py::flake8 [GOOD] >> test_ydb_impex.py::flake8 [GOOD] >> test_ydb_recursive_remove.py::flake8 [GOOD] >> test_ydb_scheme.py::flake8 [GOOD] >> test_ydb_scripting.py::flake8 [GOOD] >> test_ydb_sql.py::flake8 [GOOD] >> test_select_limit_db_id.py::flake8 [GOOD] |72.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/api/flake8 >> test_session_pool.py::flake8 [GOOD] |72.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe |72.1%| [TS] {RESULT} ydb/tests/functional/serverless/flake8 >> test_select_timings.py::flake8 [GOOD] >> test_ydb_table.py::flake8 [GOOD] >> docker_wrapper_test.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test_update_script_tables.py::flake8 [GOOD] >> test_stop.py::flake8 [GOOD] >> test_watermarks.py::flake8 [GOOD] >> test_yds_bindings.py::flake8 [GOOD] >> test_yq_streaming.py::flake8 [GOOD] |72.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part8/flake8 |72.0%| [TS] {RESULT} ydb/tests/functional/serializable/flake8 >> test_actorsystem.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |72.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/simple_queue/tests/flake8 >> test_workload.py::flake8 [GOOD] |72.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator >> test.py::flake8 [GOOD] |72.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part16/flake8 >> test.py::flake8 [GOOD] |72.0%| [TS] {asan, 
default-linux-x86_64, release} ydb/tests/functional/ydb_cli/flake8 >> test_ydb_table.py::flake8 [GOOD] |72.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/tests/flake8 >> test_kqprun_recipe.py::flake8 [GOOD] |72.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/http_api/flake8 >> test_http_api.py::flake8 [GOOD] |72.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yds/flake8 >> test_yq_streaming.py::flake8 [GOOD] >> test_mixed.py::flake8 [GOOD] |71.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/wardens/flake8 >> test_liveness_wardens.py::flake8 [GOOD] |71.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/olap_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |71.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/postgres_integrations/go-libpq/flake8 >> docker_wrapper_test.py::flake8 [GOOD] |71.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/flake8 >> test_update_script_tables.py::flake8 [GOOD] |71.9%| [TS] {RESULT} ydb/tests/sql/lib/flake8 |72.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part0/flake8 >> test.py::flake8 [GOOD] |72.0%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |72.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/autoconfig/flake8 >> test_actorsystem.py::flake8 [GOOD] |72.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part11/flake8 >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_unknown_data_source.py::flake8 [GOOD] >> test_commit.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_alloc_default.py::flake8 [GOOD] >> test_timeout.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_dc_local.py::flake8 [GOOD] >> test_result_limits.py::flake8 [GOOD] |71.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/solomon/flake8 >> test.py::flake8 [GOOD] |71.4%| [TS] {RESULT} ydb/tests/functional/hive/flake8 |71.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics >> test_scheduling.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_cp_ic.py::flake8 [GOOD] >> test_stats_mode.py::flake8 [GOOD] >> test_dispatch.py::flake8 [GOOD] >> test_query_cache.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test_retry.py::flake8 [GOOD] >> parser.py::flake8 [GOOD] >> test_retry_high_rate.py::flake8 [GOOD] |71.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/mixedpy/flake8 >> test_mixed.py::flake8 [GOOD] |71.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |71.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/common/flake8 >> test_unknown_data_source.py::flake8 [GOOD] |71.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_multinode_cluster.py::flake8 [GOOD] >> test_ctas.py::flake8 [GOOD] >> scenario.py::flake8 [GOOD] >> test_yt_reading.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_recompiles_requests.py::flake8 [GOOD] >> test_quoting.py::flake8 [GOOD] >> test_case.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |71.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part15/flake8 >> test.py::flake8 [GOOD] |71.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part14/flake8 >> test.py::flake8 [GOOD] |71.3%| COMPACTING 
CACHE 30.8GiB |71.4%| [TS] {RESULT} ydb/tests/functional/ydb_cli/flake8 |71.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |71.4%| [TS] {RESULT} ydb/tests/olap/s3_import/flake8 |71.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.so |71.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/flake8 >> test_retry_high_rate.py::flake8 [GOOD] |71.4%| [TS] {RESULT} ydb/tests/olap/load/flake8 |71.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part7/flake8 >> test.py::flake8 [GOOD] |71.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/mem_alloc/flake8 >> test_scheduling.py::flake8 [GOOD] |71.4%| [TS] {RESULT} ydb/tests/fq/yds/flake8 |71.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/plans/flake8 >> test_stats_mode.py::flake8 [GOOD] |71.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/pq_read/test/flake8 >> test_timeout.py::flake8 [GOOD] |71.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/query_cache/flake8 >> test_query_cache.py::flake8 [GOOD] |71.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/flake8 >> test_quoting.py::flake8 [GOOD] |71.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/docs/generator/flake8 >> parser.py::flake8 [GOOD] |71.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_import/flake8 >> test_yt_reading.py::flake8 [GOOD] |71.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/join/flake8 >> test_case.py::flake8 [GOOD] |71.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/multinode/flake8 >> test_recompiles_requests.py::flake8 [GOOD] >> test_sql.py::flake8 [GOOD] >> test_compatibility.py::flake8 [GOOD] >> test_followers.py::flake8 [GOOD] >> test_stress.py::flake8 [GOOD] |71.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part4/flake8 >> test.py::flake8 [GOOD] |71.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part5/flake8 |71.4%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/flake8 |71.5%| [TS] {RESULT} ydb/tests/olap/data_quotas/flake8 |71.5%| [TS] {RESULT} ydb/tests/functional/sqs/large/flake8 |71.5%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |71.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/canonical/flake8 >> test_sql.py::flake8 [GOOD] >> column_table_helper.py::flake8 [GOOD] >> range_allocator.py::flake8 [GOOD] >> s3_client.py::flake8 [GOOD] >> thread_helper.py::flake8 [GOOD] >> time_histogram.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_client.py::flake8 [GOOD] |71.5%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server |71.5%| [TS] {RESULT} ydb/tests/fq/generic/streaming/black |71.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/compatibility/flake8 >> test_stress.py::flake8 [GOOD] |71.5%| [TS] {RESULT} ydb/tests/functional/script_execution/flake8 |71.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part9/flake8 |71.5%| [TS] {RESULT} ydb/tests/sql/large/flake8 >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |71.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/common/flake8 >> ydb_client.py::flake8 [GOOD] |71.5%| [TS] {RESULT} ydb/tests/functional/limits/flake8 |71.5%| [TS] {RESULT} ydb/tests/functional/config/flake8 >> 
test_alter_ops.py::flake8 [GOOD] >> test_copy_ops.py::flake8 [GOOD] >> test_scheme_shard_operations.py::flake8 [GOOD] >> test_ttl.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_insert_restarts.py::flake8 [GOOD] |71.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part19/flake8 |71.5%| [LD] {RESULT} $(B)/ydb/tests/olap/docs/generator/generator |71.5%| [TS] {RESULT} ydb/tests/functional/postgresql/flake8 |71.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/viewer/tests/flake8 >> test.py::flake8 [GOOD] |71.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 >> test.py::flake8 [GOOD] |71.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/flake8 >> test_ttl.py::flake8 [GOOD] >> test_dml.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |71.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part13/flake8 |71.5%| [TS] {RESULT} ydb/tests/functional/cms/flake8 |71.6%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/flake8 |71.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/flake8 >> test_scheme_shard_operations.py::flake8 [GOOD] |71.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/restarts/flake8 >> test_insert_restarts.py::flake8 [GOOD] |71.6%| [TS] {RESULT} ydb/tests/functional/compatibility/flake8 |71.6%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 |71.6%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/flake8 |71.6%| [TS] {RESULT} ydb/tests/olap/common/flake8 |71.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part3/flake8 |71.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part7/flake8 |71.6%| [TS] {RESULT} ydb/tests/tools/pq_read/test/flake8 |71.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/datashard/dml/flake8 >> test_dml.py::flake8 [GOOD] |71.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 >> test.py::flake8 [GOOD] >> test_fifo_messaging.py::flake8 [GOOD] >> test_generic_messaging.py::flake8 [GOOD] >> test_polling.py::flake8 [GOOD] |71.6%| [TS] {RESULT} ydb/tests/functional/ttl/flake8 >> test_account_actions.py::flake8 [GOOD] >> test_acl.py::flake8 [GOOD] >> test_counters.py::flake8 [GOOD] >> test_format_without_version.py::flake8 [GOOD] >> test_garbage_collection.py::flake8 [GOOD] >> test_multiplexing_tables_format.py::flake8 [GOOD] >> test_ping.py::flake8 [GOOD] >> test_queue_attributes_validation.py::flake8 [GOOD] >> test_queue_counters.py::flake8 [GOOD] >> test_queue_tags.py::flake8 [GOOD] >> test_queues_managing.py::flake8 [GOOD] >> test_throttling.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> select_positive_with_schema.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |71.6%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 |71.6%| [TS] {RESULT} ydb/core/viewer/tests/flake8 |71.6%| [TS] {RESULT} ydb/tests/functional/scheme_shard/flake8 |71.6%| [TS] {RESULT} ydb/tests/fq/restarts/flake8 |71.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/flake8 >> test_polling.py::flake8 [GOOD] |71.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part18/flake8 |71.6%| [TS] {RESULT} ydb/tests/functional/scheme_tests/flake8 |71.6%| [TS] {RESULT} 
ydb/tests/functional/autoconfig/flake8 |71.6%| [TS] {RESULT} ydb/tests/fq/http_api/flake8 |71.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/astdiff/astdiff |71.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |71.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |71.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |71.7%| [TS] {RESULT} ydb/tests/olap/ttl_tiering/flake8 |71.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |71.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 >> test.py::flake8 [GOOD] |71.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so |71.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/common/flake8 >> test_throttling.py::flake8 [GOOD] |71.7%| [TS] {RESULT} ydb/tests/fq/common/flake8 |71.7%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/flake8 |71.7%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 |71.7%| [TS] {RESULT} ydb/tests/datashard/dml/flake8 |71.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part12/flake8 |71.7%| [TS] {RESULT} ydb/tests/functional/tpc/large/flake8 |71.7%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/flake8 |71.7%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |71.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/python/python3_small/libpython3_udf.so |71.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part6/flake8 |71.7%| [TS] {RESULT} ydb/tests/fq/solomon/flake8 |71.7%| [TS] {RESULT} ydb/tests/fq/generic/analytics/black |71.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part0/flake8 |71.8%| [TS] {RESULT} ydb/tests/stress/olap_workload/tests/flake8 |71.8%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 |71.8%| [TS] {RESULT} ydb/tests/olap/lib/flake8 |71.8%| [TS] {RESULT} ydb/tests/functional/sqs/common/flake8 |71.8%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 |71.8%| [LD] {RESULT} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml |71.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/server_restart/public-sdk-cpp-tests-integration-server_restart |71.8%| [TS] {RESULT} ydb/tests/olap/flake8 |71.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |71.8%| [TS] {RESULT} ydb/tests/olap/oom/flake8 |71.8%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |71.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part16/flake8 |71.8%| [TS] {RESULT} ydb/tests/library/ut/flake8 |71.8%| [TS] {RESULT} ydb/tests/stress/simple_queue/tests/flake8 |71.8%| [TS] {RESULT} ydb/tests/functional/blobstorage/flake8 |71.8%| [TS] {RESULT} ydb/tests/example/flake8 |71.8%| [TS] {RESULT} ydb/tests/functional/audit/flake8 |71.8%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |71.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |71.8%| [TS] {RESULT} ydb/tests/functional/restarts/flake8 |71.8%| [TS] {RESULT} ydb/tests/olap/column_family/compression/flake8 |71.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |71.9%| 
[TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part10/flake8 |71.9%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |71.9%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 |71.9%| [TS] {RESULT} ydb/tests/stress/kv/tests/flake8 |71.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part1/flake8 |71.9%| [TS] {RESULT} ydb/tests/stress/oltp_workload/tests/flake8 |71.9%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 |71.9%| [TS] {RESULT} ydb/tests/functional/rename/flake8 |71.9%| [TS] {RESULT} ydb/tests/functional/tpc/medium/flake8 |71.9%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |71.9%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |71.9%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |71.9%| [LD] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |71.9%| [TS] {RESULT} ydb/tests/sql/flake8 |71.9%| [LD] {RESULT} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |71.9%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |71.9%| [TS] {RESULT} ydb/tests/functional/tenants/flake8 |71.9%| [LD] {RESULT} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |71.9%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |71.9%| [TS] {RESULT} ydb/tests/fq/streaming_optimize/flake8 |71.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.so |72.0%| [TS] {RESULT} ydb/tests/fq/generic/analytics/flake8 |72.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part17/flake8 |72.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.so |72.0%| [TS] {RESULT} ydb/tests/stress/mixedpy/flake8 |72.0%| [TS] {RESULT} ydb/tests/functional/wardens/flake8 |72.0%| [TS] {RESULT} ydb/tests/olap/docs/generator/flake8 |72.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part4/flake8 |72.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_import/flake8 |72.0%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/flake8 |72.0%| [TS] {RESULT} ydb/tests/functional/canonical/flake8 |72.0%| [TS] {RESULT} ydb/tests/functional/encryption/flake8 |72.0%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/join/flake8 |72.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part15/flake8 |72.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |72.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |72.0%| [LD] {RESULT} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |72.0%| [TS] {RESULT} ydb/tests/fq/mem_alloc/flake8 |72.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part14/flake8 |72.0%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |72.1%| [TS] {RESULT} ydb/tests/fq/plans/flake8 |72.1%| [TS] {RESULT} ydb/tests/functional/query_cache/flake8 |72.1%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |72.1%| [TS] {RESULT} ydb/tests/olap/scenario/flake8 |72.1%| [TS] {RESULT} ydb/tests/fq/multi_plane/flake8 |72.2%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 |72.2%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/flake8 |72.2%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/flake8 |72.3%| [TS] {RESULT} ydb/tests/functional/api/flake8 |72.3%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |72.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider |72.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.so |72.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/ydb_supp |72.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.so |72.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |72.4%| [LD] {RESULT} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |72.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |72.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session >> conftest.py::flake8 [GOOD] >> s3_helpers.py::flake8 [GOOD] >> test_bindings_0.py::flake8 [GOOD] >> test_bindings_1.py::flake8 [GOOD] >> test_compressions.py::flake8 [GOOD] >> test_early_finish.py::flake8 [GOOD] >> test_explicit_partitioning_0.py::flake8 [GOOD] >> test_explicit_partitioning_1.py::flake8 [GOOD] >> test_format_setting.py::flake8 [GOOD] >> test_formats.py::flake8 [GOOD] >> test_inflight.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_push_down.py::flake8 [GOOD] >> test_s3_0.py::flake8 [GOOD] >> test_s3_1.py::flake8 [GOOD] >> test_size_limit.py::flake8 [GOOD] >> test_statistics.py::flake8 [GOOD] >> test_streaming_join.py::flake8 [GOOD] >> test_test_connection.py::flake8 [GOOD] >> test_validation.py::flake8 [GOOD] >> test_ydb_over_fq.py::flake8 [GOOD] >> test_yq_v2.py::flake8 [GOOD] |72.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming_optimize/ydb-tests-fq-streaming_optimize |72.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/ydb-tests-fq-yt-kqp_yt_import |72.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/s3/flake8 >> test_yq_v2.py::flake8 [GOOD] |72.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication |72.5%| [PK] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/{common-test_framework-udfs_deps.final.pkg.fake ... 
yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so} |72.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |72.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |72.5%| [TS] {RESULT} ydb/tests/fq/s3/flake8 |72.5%| [LD] {RESULT} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |72.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |72.7%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |72.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |72.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |72.7%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |72.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |72.8%| [AR] {default-linux-x86_64, release, asan, pic} $(B)/yt/yt/core/libyt-yt-core.a |72.8%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |72.8%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |72.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |72.9%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |72.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |73.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |73.0%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |73.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |73.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |73.0%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |73.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |73.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |73.0%| [LD] {RESULT} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |73.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |73.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |73.0%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |73.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |73.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |73.1%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |73.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |73.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |73.1%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |73.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |73.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/ydb-tests-olap |73.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ydb-tests-olap |73.1%| [LD] {RESULT} $(B)/ydb/tests/olap/ydb-tests-olap |73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |73.2%| 
[LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |73.2%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |73.2%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |73.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |73.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |73.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |73.3%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |73.3%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |73.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy |73.4%| [LD] {RESULT} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy |73.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |73.4%| [CC] {default-linux-x86_64, release, 
asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |73.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |73.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |73.4%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |73.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |73.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |73.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk >> BSCReadOnlyPDisk::ReadOnlyNotAllowed >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive |73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly >> BSCReadOnlyPDisk::ReadOnlySlay >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyOneByOne >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD] >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD]
Test command err: RandomSeed# 11520059350299785742
2025-04-17T17:09:48.270835Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:09:48.271329Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:09:48.271511Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:09:48.272033Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:09:48.272095Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:09:48.272298Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:09:48.272779Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:09:48.272847Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:09:48.274750Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:09:48.274925Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:09:48.275078Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:09:48.275351Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:09:48.275392Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:09:48.275552Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:09:48.275701Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:09:48.275983Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:09:48.276037Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:09:48.276324Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:09:48.276358Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:09:48.276504Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:09:48.276639Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:09:48.276774Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:09:48.276801Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:09:48.276834Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:09:48.281827Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:09:48.281894Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:09:48.281930Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:09:48.281965Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:09:48.282002Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:09:48.282057Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:09:48.282095Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:09:48.282379Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:09:49.139734Z 1 00h01m30.011024s :BS_LOCALRECOVERY CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0
BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "Some error reason" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD] Test command err: RandomSeed# 17383790506515311358 2025-04-17T17:09:48.578432Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T17:09:48.578824Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T17:09:48.579130Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T17:09:48.579179Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T17:09:48.579354Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T17:09:48.579400Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T17:09:48.579448Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T17:09:48.581731Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T17:09:48.582326Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T17:09:48.582366Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, 
PDiskError# Some error reason Marker# BSVSF03 2025-04-17T17:09:48.582403Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T17:09:48.582440Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T17:09:48.582476Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T17:09:48.582625Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T17:09:48.582946Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T17:09:48.583110Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T17:09:48.583135Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T17:09:48.583437Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T17:09:48.583587Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T17:09:48.583613Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T17:09:48.583639Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T17:09:48.588383Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T17:09:48.588852Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T17:09:48.588889Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T17:09:48.588945Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason 
Marker# BSVSF03 2025-04-17T17:09:48.588980Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T17:09:48.589016Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T17:09:48.589057Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 |73.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |73.5%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |73.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |73.5%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |73.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |73.5%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |73.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] >> TCmsTest::StateStorageNodesFromOneRing >> TDowntimeTest::AddDowntime [GOOD] >> TDowntimeTest::HasUpcomingDowntime [GOOD] >> TDowntimeTest::CleanupOldSegments [GOOD] >> JsonProtoConversion::ProtoMapToJson [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoArray >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] >> JsonProtoConversion::JsonToProtoArray |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] >> JsonProtoConversion::JsonToProtoArray [GOOD] >> TCmsTest::WalleTasks >> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD] >> TCmsTenatsTest::TestTenantLimit >> TCmsTest::StateRequest >> TCmsTest::TestKeepAvailableMode |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TDowntimeTest::CleanupOldSegments [GOOD] |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson [GOOD] >> TCmsTest::RequestRestartServicesRejectSecond >> TCmsTest::RequestRestartServicesMultipleNodes >> JsonProtoConversion::JsonToProtoMap [GOOD] >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled >> TCmsTest::ManagePermissions >> TCmsTest::StateRequestNode >> TMaintenanceApiTest::SingleCompositeActionGroup >> TCmsTest::TestForceRestartMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] Test command err: 
RandomSeed# 1974193591400313329
2025-04-17T17:09:52.419118Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0]
2025-04-17T17:09:52.421261Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 13301006930831846255]
2025-04-17T17:09:52.514345Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0
|73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest
>> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD]
|73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest
>> JsonProtoConversion::JsonToProtoArray [GOOD]
|73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest
>> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD]
>> TDowntimeTest::SetIgnoredDowntimeGap [GOOD]
>> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup
>> TCmsTest::RequestRestartServicesOk
>> TCmsTenatsTest::TestClusterLimit
>> JsonProtoConversion::JsonToProtoSingleValue [GOOD]
>> TMaintenanceApiTest::ManyActionGroupsWithSingleAction
>> TCmsTest::CollectInfo
>> TClusterInfoTest::DeviceId [GOOD]
>> TClusterInfoTest::FillInfo [GOOD]
>> TCmsTenatsTest::CollectInfo
>> TCmsTest::RequestReplaceDevices
>> TCmsTest::ActionIssuePartialPermissions
>> TKesusTest::TestAttachOutOfSequence
>> TCmsTest::WalleRebootDownNode
>> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD]
>> TCmsTenatsTest::TestTenantRatioLimit
>> TCmsTenatsTest::TestNoneTenantPolicy
>> TCmsTest::TestOutdatedState
>> TCmsTest::ManageRequestsWrong
>> TKesusTest::TestQuoterHDRRParametersValidation
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD]
Test command err:
RandomSeed# 4067847540890441370
|73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest
>> JsonProtoConversion::JsonToProtoMap [GOOD]
>> TKesusTest::TestAcquireWaiterDowngrade
>> TCmsTest::RequestReplaceBrokenDevices
>> TCmsTest::TestKeepAvailableModeScheduled
>> TCmsTenatsTest::TestClusterRatioLimit
>> THDRRQuoterResourceTreeRuntimeTest::TestHierarchicalQuotas [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestHangDefence [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestMoreStrongChildLimit [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestInactiveSessionDisconnectsAndThenConnectsAgain [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD]
>> TKesusTest::TestKesusConfig
>> TKesusTest::TestAttachNewSessions
>> THDRRQuoterResourceTreeRuntimeTest::TestUpdateResourceSessions [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestStopConsuming [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionState [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionStateAfterAllResourceAllocated [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD]
>> TKesusTest::TestAcquireLocks
|73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest
>> JsonProtoConversion::JsonToProtoSingleValue [GOOD]
>> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42
|73.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut
|73.6%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut
|73.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut
|73.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut
|73.6%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut
|73.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut
|73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest
>> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD]
Test command err:
RandomSeed# 10883477307207328789
|73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest
>> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD]
>> TCmsTest::StateStorageNodesFromOneRing [GOOD]
>> TCmsTest::StateStorageTwoBrokenRings
|73.6%| [TA] $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TKesusTest::TestAcquireWaiterDowngrade [GOOD]
>> TKesusTest::TestAcquireWaiterUpgrade
>> TKesusTest::TestAttachOutOfSequence [GOOD]
>> TKesusTest::TestAttachOutOfSequenceInTx
|73.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr
|73.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr
|73.6%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|73.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr
>> TKesusTest::TestSessionTimeoutAfterDetach
>> TKesusTest::TestSessionDetach
>> TKesusTest::TestKesusConfig [GOOD]
>> TKesusTest::TestLockNotFound
>> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled [GOOD]
>> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction
>> THDRRQuoterResourceTreeRuntimeTest::TestWeights [GOOD]
>> TKesusTest::TestQuoterHDRRParametersValidation [GOOD]
>> TKesusTest::TestQuoterAccountResourcesOnDemand
>> TCmsTest::StateRequest [GOOD]
>> TCmsTest::ScheduledEmergencyDuringRollingRestart
>> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup [GOOD]
>> TMaintenanceApiTest::ActionReason
>> TCmsTest::TestKeepAvailableMode [GOOD]
>> TCmsTest::TestKeepAvailableModeDisconnects
>> THDRRQuoterResourceTreeRuntimeTest::TestWeightsChange [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestVerySmallSpeed [GOOD]
>> TKesusTest::TestAcquireBeforeTimeoutViaRelease
>> TKesusTest::TestReleaseLockFailure
>> TCmsTest::RequestRestartServicesRejectSecond [GOOD]
>> TCmsTest::RequestRestartServicesWrongHost
>> TCmsTest::StateRequestNode [GOOD]
>> TCmsTest::StateRequestUnknownNode
>> TCmsTest::ActionIssuePartialPermissions [GOOD]
>> TCmsTest::ActionWithZeroDuration
>> TCmsTenatsTest::TestTenantLimit [GOOD]
>> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy
>> TCmsTest::ManagePermissions [GOOD]
>> TCmsTest::ManagePermissionWrongRequest
>> THDRRQuoterResourceTreeRuntimeTest::TestCreateInactiveSession [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceSessions [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestDistributeResourcesBetweenConsumers [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestEffectiveProps [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD]
>> TKesusTest::TestAttachNewSessions [GOOD]
>> TKesusTest::TestAttachMissingSession
>> TKesusTest::TestAttachOutOfSequenceInTx [GOOD]
>> TKesusTest::TestAttachThenReRegister
>> TKesusTest::TestLockNotFound [GOOD]
>> TKesusTest::TestDeleteSemaphore
>> TKesusTest::TestQuoterResourceDescribe
>> TCmsTest::RequestRestartServicesMultipleNodes [GOOD]
>> TCmsTest::RequestRestartServicesDryRun
>> TCmsTenatsTest::CollectInfo [GOOD]
>> TCmsTenatsTest::RequestRestartServices
>> TKesusTest::TestAcquireSemaphoreTimeout
>> TCmsTest::ManageRequestsWrong [GOOD]
>> TCmsTest::ManageRequestsDry
>> TCmsTest::TestForceRestartMode [GOOD]
>> TCmsTest::TestForceRestartModeDisconnects
>> TKesusTest::TestSessionDetach [GOOD]
>> TKesusTest::TestSessionDetachFutureId
>> THDRRQuoterResourceTreeRuntimeTest::TestAllocateResource [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestAllocationGranularity [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestAmountIsLessThanEpsilon [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestActiveSessionDisconnectsAndThenConnectsAgain [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD]
>> TKesusTest::TestQuoterAccountResourcesBurst
>> TKesusTest::TestAcquireWaiterUpgrade [GOOD]
>> TKesusTest::TestAcquireWaiterChangeTimeoutToZero
>> TMaintenanceApiTest::ManyActionGroupsWithSingleAction [GOOD]
>> TMaintenanceApiTest::CreateTime
>> TKesusTest::TestReleaseLockFailure [GOOD]
>> TKesusTest::TestReleaseSemaphore
|73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest
>> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD]
>> TCmsTenatsTest::TestTenantRatioLimit [GOOD]
>> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode
>> TCmsTest::CollectInfo [GOOD]
>> TCmsTest::DynamicConfig
>> TKesusTest::TestAttachMissingSession [GOOD]
>> TKesusTest::TestAttachOldGeneration
>> TKesusTest::TestAttachThenReRegister [GOOD]
>> TKesusTest::TestAcquireUpgrade
>> TKesusTest::TestAttachTimeoutTooBig
>> TKesusTest::TestAcquireBeforeTimeoutViaRelease [GOOD]
>> TKesusTest::TestAcquireBeforeTimeoutViaModeChange
>> TKesusTest::TestUnregisterProxy
>> TCmsTenatsTest::TestNoneTenantPolicy [GOOD]
>> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost
>> TProxyActorTest::TestAttachSession
>> TKesusTest::TestAcquireWaiterChangeTimeoutToZero [GOOD]
>> TKesusTest::TestAcquireWaiterRelease
>> TCmsTest::RequestRestartServicesOk [GOOD]
>> TCmsTest::RequestRestartServicesReject
|73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest
>> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD]
|73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
>> TKesusTest::TestReleaseSemaphore [GOOD]
>> TKesusTest::TestSemaphoreData
>> TKesusTest::TestSessionDetachFutureId [GOOD]
>> TKesusTest::TestSessionDestroy
>> TKesusTest::TestDeleteSemaphore [GOOD]
>> TKesusTest::TestDescribeSemaphoreWatches
>> TKesusTest::TestQuoterResourceDescribe [GOOD]
>> TKesusTest::TestQuoterResourceCreation
>> TKesusTest::TestAttachOldGeneration [GOOD]
>> TKesusTest::TestAttachFastPath
|73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
>> TKesusTest::TestUnregisterProxy [GOOD]
>> TKesusTest::TestUnregisterProxyBadGeneration
>> TKesusTest::TestRegisterProxy
>> TCmsTenatsTest::TestClusterLimit [GOOD]
>> TCmsTenatsTest::RequestShutdownHost
>> TCmsTest::TestOutdatedState [GOOD]
>> TCmsTest::TestSetResetMarkers
|73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
>> TCmsTest::DynamicConfig [GOOD]
>> TCmsTest::DisabledEvictVDisks
>> TCmsTest::RequestReplaceDevices [GOOD]
>> TCmsTest::RequestReplaceDevicePDisk
>> TKesusTest::TestAttachTimeoutTooBig [GOOD]
>> TKesusTest::TestAcquireUpgrade [GOOD]
>> TKesusTest::TestAcquireTimeout
>> TKesusTest::TestCreateSemaphore
>> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD]
>> TCmsTest::WalleRebootDownNode [GOOD]
>> TCmsTest::WalleCleanupTest
>> TKesusTest::TestAcquireWaiterRelease [GOOD]
>> TKesusTest::TestAllocatesResources
>> TCmsTest::RequestReplaceBrokenDevices [GOOD]
>> TCmsTest::PermissionDuration
>> TCmsTenatsTest::TestClusterRatioLimit [GOOD]
>> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode
>> TKesusTest::TestSemaphoreData [GOOD]
>> TKesusTest::TestSemaphoreReleaseReacquire
>> TKesusTest::TestQuoterResourceCreation [GOOD]
>> TKesusTest::TestQuoterResourceModification
>> TCmsTest::TestKeepAvailableModeScheduled [GOOD]
>> TCmsTest::TestKeepAvailableModeScheduledDisconnects
>> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 [GOOD]
>> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc
>> TKesusTest::TestSessionDestroy [GOOD]
>> TKesusTest::TestSessionStealing
>> TMaintenanceApiTest::SingleCompositeActionGroup [GOOD]
>> TMaintenanceApiTest::SimplifiedMirror3DC
>> TKesusTest::TestUnregisterProxyBadGeneration [GOOD]
>> TKesusTest::TestSessionTimeoutAfterUnregister
>> TProxyActorTest::TestAttachSession [GOOD]
>> TCmsTest::RequestRestartServicesWrongHost [GOOD]
>> TCmsTest::RestartNodeInDownState
>> TKesusTest::TestAttachFastPath [GOOD]
>> TKesusTest::TestAttachFastPathBlocked
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD]
Test command err:
2025-04-17T17:10:05.585708Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-17T17:10:05.585815Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-17T17:10:05.602782Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-17T17:10:05.603132Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-17T17:10:05.628586Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-17T17:10:05.630917Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=987570552902191565, session=0, seqNo=0)
2025-04-17T17:10:05.631235Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-04-17T17:10:05.645439Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=987570552902191565, session=1)
2025-04-17T17:10:05.646086Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:133:2159], cookie=11586269098696964000, session=0, seqNo=0)
2025-04-17T17:10:05.646269Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2
2025-04-17T17:10:05.660248Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:133:2159], cookie=11586269098696964000, session=2)
2025-04-17T17:10:05.661592Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615)
2025-04-17T17:10:05.661757Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1"
2025-04-17T17:10:05.661844Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:10:05.677431Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=111) 2025-04-17T17:10:05.677888Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=112, session=1, semaphore="Lock2" count=1) 2025-04-17T17:10:05.678040Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-04-17T17:10:05.678161Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-04-17T17:10:05.695278Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=112) 2025-04-17T17:10:05.696148Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:132:2158], cookie=333, name="Lock1") 2025-04-17T17:10:05.696983Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-04-17T17:10:05.697504Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:133:2159], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-17T17:10:05.697784Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 3 "Lock1" 2025-04-17T17:10:05.698106Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2025-04-17T17:10:05.698385Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:133:2159], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-04-17T17:10:05.716375Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:132:2158], cookie=333) 2025-04-17T17:10:05.716517Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:133:2159], cookie=222) 2025-04-17T17:10:05.716753Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:133:2159], cookie=223) 2025-04-17T17:10:05.717548Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:132:2158], cookie=334, name="Lock2") 2025-04-17T17:10:05.717700Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-04-17T17:10:05.717867Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-17T17:10:05.732824Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:132:2158], cookie=334) 2025-04-17T17:10:05.735426Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:161:2185], cookie=9014712613453105448, name="Lock1") 2025-04-17T17:10:05.735566Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:161:2185], cookie=9014712613453105448) 2025-04-17T17:10:05.739011Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:164:2188], cookie=4866912932210020343, name="Lock2") 2025-04-17T17:10:05.739206Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:164:2188], cookie=4866912932210020343) 2025-04-17T17:10:05.766959Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:05.767101Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:05.769102Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:05.770112Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:05.809352Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:05.809520Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-17T17:10:05.809571Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2025-04-17T17:10:05.809913Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:204:2218], cookie=9706064393361805858, name="Lock1") 2025-04-17T17:10:05.810004Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:204:2218], cookie=9706064393361805858) 2025-04-17T17:10:05.810657Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:212:2225], cookie=8722319219427657138, name="Lock2") 2025-04-17T17:10:05.810725Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:212:2225], cookie=8722319219427657138) 2025-04-17T17:10:06.430636Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:06.430727Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:06.444603Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:06.444691Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:06.468653Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:06.469360Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=6930004167356222180, session=0, seqNo=0) 2025-04-17T17:10:06.469486Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:06.484819Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=6930004167356222180, session=1) 2025-04-17T17:10:06.485136Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2159], cookie=1603576570526073148, session=0, seqNo=0) 2025-04-17T17:10:06.485255Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:10:06.497699Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2159], cookie=1603576570526073148, session=2) 2025-04-17T17:10:06.498630Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:10:06.498760Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:10:06.498839Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:10:06.510573Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=111) 2025-04-17T17:10:06.510849Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=112, session=1, semaphore="Lock2" count=1) 2025-04-17T17:10:06.510956Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-04-17T17:10:06.511023Z node 2 :KESUS_TABLET DEBUG: 
[72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-04-17T17:10:06.522700Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=112) 2025-04-17T17:10:06.523039Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=333, session=1, semaphore="Lock1" count=1) 2025-04-17T17:10:06.523244Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-17T17:10:06.523339Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-04-17T17:10:06.523432Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-04-17T17:10:06.534772Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=333) 2025-04-17T17:10:06.534828Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=222) 2025-04-17T17:10:06.534846Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=223) 2025-04-17T17:10:06.535211Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:159:2183], cookie=16486933044802147578, name="Lock1") 2025-04-17T17:10:06.535279Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:159:2183], cookie=16486933044802147578) 2025-04-17T17:10:06.535570Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:162:2186], cookie=8227737133225620898, name="Lock2") 2025-04-17T17:10:06.535635Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:162:2186], cookie=8227737133225620898) 2025-04-17T17:10:06.536035Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:165:2189], cookie=7526903795108126277, name="Lock1") 2025-04-17T17:10:06.536099Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:165:2189], cookie=7526903795108126277) 2025-04-17T17:10:06.536555Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:168:2192], cookie=1890345952240917327, name="Lock2") 2025-04-17T17:10:06.536602Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:168:2192], cookie=1890345952240917327) 2025-04-17T17:10:06.536757Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=444, session=2, semaphore="Lock2" count=1) 2025-04-17T17:10:06.536843Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-17T17:10:06.548221Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=444) 2025-04-17T17:10:06.548711Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:173:2197], cookie=5153394841061977543, name="Lock2") 2025-04-17T17:10:06.548772Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:173:2197], cookie=5153394841061977543) 2025-04-17T17:10:06.549072Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:176:2200], 
cookie=5989950916623188125, name="Lock2") 2025-04-17T17:10:06.549113Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:176:2200], cookie=5989950916623188125) 2025-04-17T17:10:06.559385Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:06.559458Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:06.559926Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:06.560178Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:06.618375Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:06.618561Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:10:06.618629Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-04-17T17:10:06.618663Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-04-17T17:10:06.618678Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-17T17:10:06.619517Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:216:2230], cookie=10323828154676758311, name="Lock1") 2025-04-17T17:10:06.619671Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:216:2230], cookie=10323828154676758311) 2025-04-17T17:10:06.620235Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:224:2237], cookie=16139547917048676836, name="Lock2") 2025-04-17T17:10:06.620298Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:224:2237], cookie=16139547917048676836) >> TCmsTest::StateRequestUnknownNode [GOOD] >> TCmsTest::StateRequestUnknownMultipleNodes >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions >> TCmsTest::ScheduledEmergencyDuringRollingRestart [GOOD] >> TCmsTest::ScheduledWalleRequestDuringRollingRestart >> TCmsTest::ManagePermissionWrongRequest [GOOD] >> TCmsTest::ManageRequests |73.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |73.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace >> TCmsTest::TestKeepAvailableModeDisconnects [GOOD] >> TCmsTest::TestForceRestartModeScheduled >> TMaintenanceApiTest::ActionReason [GOOD] >> TCmsTest::StateStorageTwoBrokenRings [GOOD] >> TCmsTest::StateStorageRollingRestart >> TKesusTest::TestQuoterAccountResourcesOnDemand [GOOD] >> TKesusTest::TestQuoterAccountResourcesPaced >> TCmsTest::ActionWithZeroDuration [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartMode >> TCmsTest::TestForceRestartModeDisconnects [GOOD] >> TCmsTest::StateStorageTwoRings >> TKesusTest::TestRegisterProxy [GOOD] >> TKesusTest::TestRegisterProxyBadGeneration >> TCmsTest::RequestRestartServicesDryRun [GOOD] >> TCmsTest::RequestReplacePDiskDoesntBreakGroup >> TKesusTest::TestAllocatesResources [GOOD] >> TMaintenanceApiTest::CreateTime [GOOD] >> TMaintenanceApiTest::LastRefreshTime |73.7%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession [GOOD] |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TKesusTest::TestQuoterAccountResourcesBurst [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateClients >> TCmsTest::ManageRequestsDry [GOOD] >> TCmsTest::Notifications >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode [GOOD] >> TKesusTest::TestCreateSemaphore [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost [GOOD] >> TCmsTenatsTest::TestLimitsWithDownNode >> TKesusTest::TestDescribeSemaphoreWatches [GOOD] >> TKesusTest::TestGetQuoterResourceCounters |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::ActionReason [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAllocatesResources [GOOD] Test command err: 2025-04-17T17:10:02.435048Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:02.435160Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:02.522455Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:02.523692Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:02.597726Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:02.604471Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=14818902606563163467, session=0, seqNo=0) 2025-04-17T17:10:02.605724Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:02.626281Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=14818902606563163467, session=1) 2025-04-17T17:10:02.627906Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=16244455728731043598, session=0, seqNo=0) 2025-04-17T17:10:02.631968Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:10:02.657818Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=16244455728731043598, session=2) 2025-04-17T17:10:02.659877Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=111, session=1, semaphore="Lock1" count=1) 2025-04-17T17:10:02.660777Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:10:02.661837Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:10:02.691332Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=111) 2025-04-17T17:10:02.693070Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=222, session=2, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:10:02.694018Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=333, session=2, semaphore="Lock1" count=1) 2025-04-17T17:10:02.694228Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #2 session 2 2025-04-17T17:10:02.718682Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=222) 2025-04-17T17:10:02.719065Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=333) 2025-04-17T17:10:02.721412Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:148:2172], cookie=6297977667387107376, name="Lock1") 2025-04-17T17:10:02.721921Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:148:2172], cookie=6297977667387107376) 2025-04-17T17:10:05.358529Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:05.358766Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:05.384986Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:05.385127Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:05.403062Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:05.403527Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=12150073775975681494, session=0, seqNo=0) 2025-04-17T17:10:05.403673Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:05.425891Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=12150073775975681494, session=1) 2025-04-17T17:10:05.426107Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=18270392257862627297, session=0, seqNo=0) 2025-04-17T17:10:05.426183Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:10:05.438909Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=18270392257862627297, session=2) 2025-04-17T17:10:05.439106Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:10:05.439197Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:10:05.439283Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:10:05.453684Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=111) 2025-04-17T17:10:05.453880Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-17T17:10:05.454048Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=333, session=2, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:10:05.465518Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=222) 2025-04-17T17:10:05.465592Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=333) 2025-04-17T17:10:05.466000Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:146:2170], cookie=11343513207509811034, name="Lock1") 2025-04-17T17:10:05.466077Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:146:2170], cookie=11343513207509811034) 2025-04-17T17:10:05.466483Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreDescribe::Execute (sender=[2:149:2173], cookie=13888671254506004785, name="Lock1") 2025-04-17T17:10:05.466547Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:149:2173], cookie=13888671254506004785) 2025-04-17T17:10:05.893038Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:05.893123Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:05.907598Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:05.907705Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:05.920850Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:05.921270Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=5309432249892741101, session=0, seqNo=0) 2025-04-17T17:10:05.921398Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:05.943611Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=5309432249892741101, session=1) 2025-04-17T17:10:05.943880Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=6186551121417200429, session=0, seqNo=0) 2025-04-17T17:10:05.943994Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:10:05.957426Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=6186551121417200429, session=2) 2025-04-17T17:10:05.958055Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:10:05.958187Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:10:05.958274Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:10:05.969871Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:130:2156], cookie=111) 2025-04-17T17:10:05.970176Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:130:2156], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-17T17:10:05.970452Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:130:2156], cookie=333, session=2, semaphore="Lock1" count=1) 2025-04-17T17:10:05.970524Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-04-17T17:10:05.982545Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:130:2156], cookie=222) 2025-04-17T17:10:05.982609Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:130:2156], cookie=333) 2025-04-17T17:10:05.983124Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:149:2173], cookie=14938764377155247575, name="Lock1") 2025-04-17T17:10:05.983193Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:149:2173], cookie=14938764377155247575) 2025-04-17T17:10:05.983592Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:152:2176], cookie=4088472504163170194, name="Lock1") 2025-04-17T17:10:05.983650Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreDescribe::Complete (sender=[3:152:2176], cookie=4088472504163170194) 2025-04-17T17:10:05.994696Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:05.994768Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:05.995191Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:05.995547Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:06.030461Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:06.030550Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:10:06.030797Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:192:2206], cookie=13774923296741851656, name="Lock1") 2025-04-17T17:10:06.030842Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:192:2206], cookie=13774923296741851656) 2025-04-17T17:10:06.031154Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:200:2213], cookie=7136159983224406958, name="Lock1") 2025-04-17T17:10:06.031196Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:200:2213], cookie=7136159983224406958) 2025-04-17T17:10:06.530129Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:06.530228Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:06.546067Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:06.546167Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:06.558955Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:06.559316Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=8749043011929327555, session=0, seqNo=0) 2025-04-17T17:10:06.559422Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:06.581951Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=8749043011929327555, session=1) 2025-04-17T17:10:06.582151Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=14731732385111362159, session=0, seqNo=0) 2025-04-17T17:10:06.582265Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:10:06.593682Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=14731732385111362159, session=2) 2025-04-17T17:10:06.593886Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:10:06.593989Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:10:06.594058Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:10:06.605407Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=111) 2025-04-17T17:10:06.605588Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-17T17:10:06.605832Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreRelease::Execute (sender=[4:130:2156], cookie=333, name="Lock1") 2025-04-17T17:10:06.605905Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-04-17T17:10:06.617466Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=222) 2025-04-17T17:10:06.617532Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:130:2156], cookie=333) 2025-04-17T17:10:07.474459Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:07.474542Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:07.489219Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:07.489558Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:07.514265Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:07.525126Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:130:2156], cookie=5269905973063332332, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2025-04-17T17:10:07.526074Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:10:07.556066Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:130:2156], cookie=5269905973063332332) 2025-04-17T17:10:07.558016Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:139:2163], cookie=3185185617701876110, path="/Root/Res", config={ }) 2025-04-17T17:10:07.559105Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-17T17:10:07.574136Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:139:2163], cookie=3185185617701876110) 2025-04-17T17:10:07.575192Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:144:2168]. Cookie: 7536715838025431878. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:10:07.575240Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:144:2168], cookie=7536715838025431878) 2025-04-17T17:10:07.575660Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:144:2168]. Cookie: 1335542539789105188. Data: { } 2025-04-17T17:10:07.575691Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Update quoter resources consumption state (sender=[5:144:2168], cookie=1335542539789105188) 2025-04-17T17:10:07.624458Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:144:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-17T17:10:07.672385Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:144:2168]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-17T17:10:07.704886Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:144:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-17T17:10:07.749767Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:144:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-17T17:10:07.780993Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:144:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestCreateSemaphore [GOOD] Test command err: 2025-04-17T17:10:03.029447Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:03.030380Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:03.130617Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:03.131515Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:03.197369Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:03.201039Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=15363725681763965827, session=0, seqNo=222) 2025-04-17T17:10:03.201932Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:03.233719Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=15363725681763965827, session=1) 2025-04-17T17:10:03.234764Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:133:2159], cookie=9543557761440506960, session=1, seqNo=111) 2025-04-17T17:10:03.262842Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:133:2159], cookie=9543557761440506960, session=1) 2025-04-17T17:10:05.367475Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:05.367552Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:05.381575Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:05.381675Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:05.407297Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:05.407709Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=111, session=0, seqNo=42) 2025-04-17T17:10:05.407844Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:05.407987Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=222, session=1, seqNo=41) 2025-04-17T17:10:05.420959Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=111, session=1) 2025-04-17T17:10:05.421047Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=222, session=1) 2025-04-17T17:10:05.694956Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:05.695038Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 
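[editor's note] The TEvResourcesAllocated run above shows the quoter handing out the "Root/Res" budget in fixed Amount: 10 batches spaced tens of milliseconds apart, consistent with the MaxUnitsPerSecond: 100 config it was created with. Below is a minimal token-bucket sketch of that pacing pattern; it is an illustration of the observable allocation behaviour only, not the tablet's actual HierarchicalDRR implementation, and the constants are copied from the log rather than from real defaults.

    #include <algorithm>
    #include <chrono>
    #include <cstdio>
    #include <thread>

    int main() {
        const double maxUnitsPerSecond = 100.0; // MaxUnitsPerSecond from the log
        const double batch = 10.0;              // Amount: 10 from TEvResourcesAllocated
        double bucket = 0.0;
        auto last = std::chrono::steady_clock::now();
        for (int grants = 0; grants < 5;) {
            auto now = std::chrono::steady_clock::now();
            std::chrono::duration<double> dt = now - last;
            last = now;
            // Refill at the configured rate, capped at one second of budget.
            bucket = std::min(bucket + dt.count() * maxUnitsPerSecond, maxUnitsPerSecond);
            if (bucket >= batch) {
                bucket -= batch;
                std::printf("TEvResourcesAllocated Amount: %.0f\n", batch);
                ++grants;
            } else {
                std::this_thread::sleep_for(std::chrono::milliseconds(5));
            }
        }
        return 0;
    }

Run standalone, this grants a batch of 10 roughly every 100 ms, i.e. the ~100 units/second cadence visible in the timestamps above.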
2025-04-17T17:10:05.708679Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:05.708751Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:05.722230Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:05.722661Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=17478604456529977417, session=0, seqNo=0) 2025-04-17T17:10:05.722810Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:05.752857Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=17478604456529977417, session=1) 2025-04-17T17:10:05.754345Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:147:2171], cookie=8743402471245462588) 2025-04-17T17:10:05.754426Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:147:2171], cookie=8743402471245462588) 2025-04-17T17:10:06.436437Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:06.436557Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:06.453053Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:06.453166Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:06.467502Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:07.552927Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:07.553235Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:07.584945Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:07.587078Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:07.617561Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:07.617948Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=11635065582827550853, session=0, seqNo=0) 2025-04-17T17:10:07.618064Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:07.634467Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=11635065582827550853, session=1) 2025-04-17T17:10:07.635258Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:10:07.635817Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:10:07.636024Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:10:07.654885Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=111) 2025-04-17T17:10:07.657741Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:146:2170], cookie=4385063770581983254, name="Sem1", limit=42) 2025-04-17T17:10:07.658043Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem1" 2025-04-17T17:10:07.675845Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:146:2170], cookie=4385063770581983254) 2025-04-17T17:10:07.676324Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreCreate::Execute (sender=[5:151:2175], cookie=18411039711434195268, name="Sem1", limit=42) 2025-04-17T17:10:07.695133Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:151:2175], cookie=18411039711434195268) 2025-04-17T17:10:07.695664Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:156:2180], cookie=7011496338344374216, name="Sem1", limit=51) 2025-04-17T17:10:07.707474Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:156:2180], cookie=7011496338344374216) 2025-04-17T17:10:07.710986Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:161:2185], cookie=17115451480455909761, name="Lock1", limit=42) 2025-04-17T17:10:07.725213Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:161:2185], cookie=17115451480455909761) 2025-04-17T17:10:07.725618Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:166:2190], cookie=11068699993719370825, name="Lock1", limit=18446744073709551615) 2025-04-17T17:10:07.745799Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:166:2190], cookie=11068699993719370825) 2025-04-17T17:10:07.748092Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:171:2195], cookie=12524795944987176970, name="Sem1") 2025-04-17T17:10:07.748672Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:171:2195], cookie=12524795944987176970) 2025-04-17T17:10:07.752059Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:174:2198], cookie=8255289982239683384, name="Sem2") 2025-04-17T17:10:07.753114Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:174:2198], cookie=8255289982239683384) 2025-04-17T17:10:07.808953Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:07.809028Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:07.809514Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:07.810048Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:07.832731Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:07.832861Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:10:07.833838Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:214:2228], cookie=18164608432865366918, name="Sem1") 2025-04-17T17:10:07.834302Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:214:2228], cookie=18164608432865366918) 2025-04-17T17:10:07.836917Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:221:2234], cookie=4653477771823308952, name="Sem2") 2025-04-17T17:10:07.836988Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:221:2234], cookie=4653477771823308952) >> TKesusTest::TestSessionStealing [GOOD] >> TKesusTest::TestSessionStealingAnyKey >> TKesusTest::TestAttachFastPathBlocked [GOOD] >> TCmsTest::RequestRestartServicesReject [GOOD] >> TCmsTest::RequestRestartServicesPartial >> TKesusTest::TestRegisterProxyBadGeneration [GOOD] >> TKesusTest::TestRegisterProxyFromDeadActor >> 
TKesusTest::TestQuoterResourceModification [GOOD] >> TKesusTest::TestQuoterResourceDeletion >> TCmsTest::RequestReplaceDevicePDisk [GOOD] >> TCmsTest::RequestReplaceDevicePDiskByPath >> TKesusTest::TestSemaphoreReleaseReacquire [GOOD] >> TKesusTest::TestSemaphoreSessionFailures >> TMaintenanceApiTest::SimplifiedMirror3DC [GOOD] >> TMaintenanceApiTest::RequestReplaceDevicePDisk >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi >> TKesusTest::TestGetQuoterResourceCounters [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAttachFastPathBlocked [GOOD] Test command err: 2025-04-17T17:10:05.110272Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:05.110403Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:05.169424Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:05.170110Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:05.215468Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:05.215924Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=1333681442026983621, session=0, seqNo=0) 2025-04-17T17:10:05.216050Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:05.241519Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=1333681442026983621, session=1) 2025-04-17T17:10:05.242752Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=10956526475895552916, session=0, seqNo=0) 2025-04-17T17:10:05.243474Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:10:05.263915Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=10956526475895552916, session=2) 2025-04-17T17:10:05.676031Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:05.676139Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:05.691407Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:05.691502Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:05.715227Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:05.716002Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=7535645597498013953, session=1, seqNo=0) 2025-04-17T17:10:05.729535Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=7535645597498013953, session=1) 2025-04-17T17:10:06.218009Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:06.218101Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:06.234772Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:06.234843Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:06.249798Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:06.251269Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=14710468804997480538, session=0, seqNo=0) 
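[editor's note] In the TTxSessionAttach entries throughout these blocks, session=0 in the request means "allocate a fresh session": the tablet logs "Created new session N" and completes the attach with that id, while a non-zero session re-attaches an existing one (the "Fast-path attach" line appears to be the case where nothing new needs to be persisted). A toy model of that id-allocation rule follows; SessionRegistry and Attach are hypothetical names for illustration, not the real Kesus classes.

    #include <cstdint>
    #include <cstdio>
    #include <unordered_map>

    // session == 0 requests a new session; a non-zero id re-attaches it.
    struct SessionRegistry {
        uint64_t nextId = 1;
        std::unordered_map<uint64_t, uint64_t> seqNoBySession;

        uint64_t Attach(uint64_t session, uint64_t seqNo) {
            if (session == 0) {
                session = nextId++; // corresponds to "Created new session N"
            }
            seqNoBySession[session] = seqNo; // remember the latest attach seqNo
            return session;
        }
    };

    int main() {
        SessionRegistry r;
        std::printf("session=%llu\n", (unsigned long long)r.Attach(0, 0));  // -> 1
        std::printf("session=%llu\n", (unsigned long long)r.Attach(0, 0));  // -> 2
        std::printf("session=%llu\n", (unsigned long long)r.Attach(1, 42)); // re-attach 1
        return 0;
    }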
2025-04-17T17:10:06.251526Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:06.275323Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=14710468804997480538, session=1) 2025-04-17T17:10:06.868671Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:06.868774Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:06.881138Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:06.881209Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:06.895463Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:06.895669Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[4:130:2156], cookie=6051844551107495282, path="") 2025-04-17T17:10:06.917891Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[4:130:2156], cookie=6051844551107495282, status=SUCCESS) 2025-04-17T17:10:06.919285Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:139:2163], cookie=14133748087567492606, session=0, seqNo=0) 2025-04-17T17:10:06.919403Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:06.933113Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:139:2163], cookie=14133748087567492606, session=1) 2025-04-17T17:10:06.933976Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:140:2164], cookie=111, session=0, seqNo=0) 2025-04-17T17:10:06.934234Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:10:06.934548Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path attach session=1 to sender=[4:140:2164], cookie=222, seqNo=0 2025-04-17T17:10:06.950072Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:140:2164], cookie=111, session=2) 2025-04-17T17:10:08.672531Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:08.672620Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:08.697155Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:08.697460Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:08.733076Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:08.733362Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[5:132:2158], cookie=10361805836717409453, path="") 2025-04-17T17:10:08.759128Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[5:132:2158], cookie=10361805836717409453, status=SUCCESS) 2025-04-17T17:10:08.761900Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:141:2165], cookie=9908429644849944334, session=0, seqNo=0) 2025-04-17T17:10:08.762636Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:08.778755Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:141:2165], cookie=9908429644849944334, session=1) 2025-04-17T17:10:08.780277Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:141:2165], cookie=123, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:10:08.781268Z node 5 :KESUS_TABLET DEBUG: 
[72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:10:08.782005Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:10:08.783104Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:142:2166], cookie=111, session=0, seqNo=0) 2025-04-17T17:10:08.783481Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:10:08.784263Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:142:2166], cookie=222, session=1, seqNo=0) 2025-04-17T17:10:08.804681Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:141:2165], cookie=123) 2025-04-17T17:10:08.804754Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:142:2166], cookie=111, session=2) 2025-04-17T17:10:08.805085Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:142:2166], cookie=222, session=1) >> TKeyValueTest::TestObtainLockNewApi >> TCmsTest::StateStorageTwoRings [GOOD] >> TCmsTest::SysTabletsNode >> TKesusTest::TestSessionStealingAnyKey [GOOD] >> TCmsTest::TestSetResetMarkers [GOOD] >> TCmsTest::TestProcessingQueue >> TKesusTest::TestRegisterProxyFromDeadActor [GOOD] >> TKesusTest::TestRegisterProxyLinkFailure >> TKeyValueTest::TestIncorrectRequestThenResponseError >> TCmsTest::StateRequestUnknownMultipleNodes [GOOD] >> TCmsTest::StateStorageAvailabilityMode >> TCmsTest::PermissionDuration [GOOD] >> TCmsTest::RacyStartCollecting |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestGetQuoterResourceCounters [GOOD] Test command err: 2025-04-17T17:10:03.323201Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:03.323295Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:03.387432Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:03.388461Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:03.435091Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:03.437078Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:132:2158], cookie=11879727003200147924, path="/foo/bar/baz") 2025-04-17T17:10:03.463392Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:132:2158], cookie=11879727003200147924, status=SUCCESS) 2025-04-17T17:10:03.466260Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:141:2165], cookie=4612994550165031029) 2025-04-17T17:10:03.488365Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:141:2165], cookie=4612994550165031029) 2025-04-17T17:10:03.490835Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:146:2170], cookie=18383912061173415047, path="/foo/bar/baz") 2025-04-17T17:10:03.519062Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:146:2170], cookie=18383912061173415047, status=SUCCESS) 2025-04-17T17:10:03.522087Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:151:2175], cookie=10340074435860299731) 2025-04-17T17:10:03.556805Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete 
(sender=[1:151:2175], cookie=10340074435860299731) 2025-04-17T17:10:03.626284Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:03.626850Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:03.631459Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:03.636200Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:03.663293Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:03.665021Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:193:2207], cookie=2843727294379656457) 2025-04-17T17:10:03.704681Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:193:2207], cookie=2843727294379656457) 2025-04-17T17:10:03.708908Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:201:2214], cookie=7722892222921254316, path="/foo/bar/baz") 2025-04-17T17:10:03.735539Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:201:2214], cookie=7722892222921254316, status=SUCCESS) 2025-04-17T17:10:03.738077Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:206:2219], cookie=8148708730384585791, path="/foo/bar/baz") 2025-04-17T17:10:03.738489Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:206:2219], cookie=8148708730384585791, status=PRECONDITION_FAILED) 2025-04-17T17:10:05.412057Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:05.412157Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:05.429273Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:05.429359Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:05.452605Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:05.452813Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:132:2158], cookie=15122107081109501890, name="Lock1") 2025-04-17T17:10:05.452904Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:132:2158], cookie=15122107081109501890) 2025-04-17T17:10:05.959380Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:05.959457Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:05.972454Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:05.972558Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:05.988823Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:05.989130Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=991185220842068886, session=0, seqNo=0) 2025-04-17T17:10:05.989222Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:06.011778Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=991185220842068886, session=1) 2025-04-17T17:10:06.012071Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:10:06.012220Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 
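[editor's note] The acquire above passes count=18446744073709551615, i.e. 2^64-1. Since the ephemeral semaphore is created on first acquire ("Created new ephemeral semaphore") with the maximum limit, taking every unit behaves as an exclusive lock, while count=1 behaves as a shared one; the "Deleting session ... owner link" lines elsewhere show the semaphore going away once its last owner or waiter is gone. A sketch of that encoding under those assumptions (observable semantics only, not the tablet code):

    #include <cstdint>
    #include <cstdio>

    // Ephemeral lock model: limit is 2^64-1; acquiring UINT64_MAX units
    // is exclusive, acquiring 1 unit is shared.
    struct EphemeralLock {
        uint64_t limit = UINT64_MAX;
        uint64_t used = 0;

        bool TryAcquire(uint64_t count) {
            if (count > limit - used) return false; // over the limit: caller queues as a waiter
            used += count;
            return true;
        }
        void Release(uint64_t count) { used -= count; } // deleted once used returns to 0
    };

    int main() {
        EphemeralLock lock;
        std::printf("exclusive: %d\n", lock.TryAcquire(UINT64_MAX)); // 1: session 1 owns the lock
        std::printf("shared:    %d\n", lock.TryAcquire(1));          // 0: session 2 must wait
        return 0;
    }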
2025-04-17T17:10:06.012297Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:10:06.023709Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:130:2156], cookie=111) 2025-04-17T17:10:06.024045Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:141:2165], cookie=13489171872885636540, name="Lock1", force=0) 2025-04-17T17:10:06.035415Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:141:2165], cookie=13489171872885636540) 2025-04-17T17:10:06.035806Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:146:2170], cookie=1389874774088225573, name="Sem1", force=0) 2025-04-17T17:10:06.047450Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:146:2170], cookie=1389874774088225573) 2025-04-17T17:10:06.047889Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:151:2175], cookie=15467677592991953243, name="Sem1", limit=42) 2025-04-17T17:10:06.048003Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem1" 2025-04-17T17:10:06.059545Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:151:2175], cookie=15467677592991953243) 2025-04-17T17:10:06.059983Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:156:2180], cookie=2170091787038255960, name="Sem1", force=0) 2025-04-17T17:10:06.060040Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 2 "Sem1" 2025-04-17T17:10:06.071710Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:156:2180], cookie=2170091787038255960) 2025-04-17T17:10:06.072172Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:161:2185], cookie=3779674272415500248, name="Sem1", force=0) 2025-04-17T17:10:06.083796Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:161:2185], cookie=3779674272415500248) 2025-04-17T17:10:06.551426Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:06.551505Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:06.566754Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:06.566900Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:06.581493Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:06.582213Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=2522292022602667327, session=0, seqNo=0) 2025-04-17T17:10:06.582461Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:06.604941Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=2522292022602667327, session=1) 2025-04-17T17:10:06.605195Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=16469043300296528270, session=0, seqNo=0) 2025-04-17T17:10:06.605383Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:10:06.619071Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=16469043300296528270, session=2) 2025-04-17T17:10:06.619259Z node 4 :KESUS_TABLET DEBUG: 
[72057594037927937] Fast-path detach session=2 from sender=[4:130:2156], cookie=323088096016789669 2025-04-17T17:10:06.619673Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:142:2166], cookie=5334831342931201396, name="Sem1", limit=3) 2025-04-17T17:10:06.619879Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-17T17:10:06.631307Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:142:2166], cookie=5334831342931201396) 2025-04-17T17:10:06.631497Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=112, name="Sem1") 2025-04-17T17:10:06.631543Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=112) 2025-04-17T17:10:06.631836Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=113, name="Sem1") 2025-04-17T17:10:06.632050Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=113) 2025-04-17T17:10:06.632385Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=17964221573792263972, session=2, seqNo=0) 2025-04-17T17:10:06.644159Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=17964221573792263972, session=2) 2025-04-17T17:10:06.644405Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=114, name="Sem1") 2025-04-17T17:10:06.644481Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=114) 2025-04-17T17:10:06.644686Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=115, name="Sem1") 2025-04-17T17:10:06.644979Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=115) 2025-04-17T17:10:06.646077Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[4:149:2173], cookie=2990738331734553355, name="Sem1") 2025-04-17T17:10:06.659634Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[4:149:2173], cookie=2990738331734553355) 2025-04-17T17:10:06.660277Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=116, session=1, semaphore="Sem1" count=1) 2025-04-17T17:10:06.660777Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-17T17:10:06.673031Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=116) 2025-04-17T17:10:06.673292Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=117, session=2, semaphore="Sem1" count=2) 2025-04-17T17:10:06.673384Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-04-17T17:10:06.684848Z node 4 :KESUS_TABLET DEBUG: [7205759403792 ... 
661475Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem2" 2025-04-17T17:10:07.673894Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:200:2218], cookie=2435792449292635042) 2025-04-17T17:10:07.674159Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=126, session=1, semaphore="Sem2" count=3) 2025-04-17T17:10:07.674259Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Sem2" queue: next order #5 session 1 2025-04-17T17:10:07.697292Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=126) 2025-04-17T17:10:07.697563Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=127, name="Sem2") 2025-04-17T17:10:07.697614Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=127) 2025-04-17T17:10:07.697833Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=128, session=1, semaphore="Sem2" count=3) 2025-04-17T17:10:07.712000Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=128) 2025-04-17T17:10:08.104600Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:08.121057Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:08.135381Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=129, session=1, semaphore="Sem2" count=2) 2025-04-17T17:10:08.153028Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=129) 2025-04-17T17:10:08.153938Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=130, name="Sem2") 2025-04-17T17:10:08.154141Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=130) 2025-04-17T17:10:08.154852Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=131, session=1, semaphore="Sem2" count=1) 2025-04-17T17:10:08.177580Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=131) 2025-04-17T17:10:08.179110Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=132, name="Sem2") 2025-04-17T17:10:08.179456Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=132) 2025-04-17T17:10:08.180624Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=133, name="Sem2") 2025-04-17T17:10:08.180791Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=133) 2025-04-17T17:10:09.344252Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:09.344373Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:09.365535Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:09.367584Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:09.403781Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:09.442341Z node 5 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=8901639912055046825, path="/Root1", config={ MaxUnitsPerSecond: 1000 }) 2025-04-17T17:10:09.443355Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root1" 2025-04-17T17:10:09.465374Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=8901639912055046825) 2025-04-17T17:10:09.466751Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:141:2165], cookie=16294027845409941470, path="/Root1/Res", config={ }) 2025-04-17T17:10:09.467586Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root1/Res" 2025-04-17T17:10:09.480224Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:141:2165], cookie=16294027845409941470) 2025-04-17T17:10:09.480805Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:146:2170], cookie=15690212355817402868, path="/Root2", config={ MaxUnitsPerSecond: 1000 }) 2025-04-17T17:10:09.480954Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root2" 2025-04-17T17:10:09.498963Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:146:2170], cookie=15690212355817402868) 2025-04-17T17:10:09.499565Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:151:2175], cookie=4972366399616924829, path="/Root2/Res", config={ }) 2025-04-17T17:10:09.499786Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root2/Res" 2025-04-17T17:10:09.515327Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:151:2175], cookie=4972366399616924829) 2025-04-17T17:10:09.515834Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:156:2180], cookie=9655254847187238067, path="/Root2/Res/Subres", config={ }) 2025-04-17T17:10:09.516013Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 5 "Root2/Res/Subres" 2025-04-17T17:10:09.532857Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:156:2180], cookie=9655254847187238067) 2025-04-17T17:10:09.533978Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:161:2185]. Cookie: 7960002957656923733. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:10:09.534037Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:161:2185], cookie=7960002957656923733) 2025-04-17T17:10:09.575751Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:161:2185]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-17T17:10:09.623290Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:161:2185]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-17T17:10:09.656709Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:161:2185]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-17T17:10:09.658559Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:168:2189]. Cookie: 15624743660355343544. Data: { ResourceCounters { ResourcePath: "Root2/Res" } ResourceCounters { ResourcePath: "Root2/Res/Subres" } ResourceCounters { ResourcePath: "Root2" } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-04-17T17:10:09.660951Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:171:2192]. Cookie: 11586888260008561688. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:10:09.661117Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:171:2192], cookie=11586888260008561688) 2025-04-17T17:10:09.696694Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:171:2192]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-17T17:10:09.744979Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:171:2192]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-17T17:10:09.745627Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:176:2196]. Cookie: 4440415241543710651. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 200 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 200 } ResourceCounters { ResourcePath: "Root2" Allocated: 200 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-04-17T17:10:09.746317Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:161:2185]. Cookie: 6801170766987720852. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:10:09.746369Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:161:2185], cookie=6801170766987720852) 2025-04-17T17:10:09.746949Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:171:2192]. Cookie: 1823757422793044907. 
Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:10:09.746995Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:171:2192], cookie=1823757422793044907) 2025-04-17T17:10:09.778889Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:161:2185]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 20 StateNotification { Status: SUCCESS } } } 2025-04-17T17:10:09.778977Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:171:2192]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 50 StateNotification { Status: SUCCESS } } } 2025-04-17T17:10:09.779503Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:183:2203]. Cookie: 3609060005301861098. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 250 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 250 } ResourceCounters { ResourcePath: "Root2" Allocated: 250 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 320 } ResourceCounters { ResourcePath: "Root1" Allocated: 320 } } >> TKesusTest::TestQuoterResourceDeletion [GOOD] >> TKesusTest::TestQuoterSubscribeOnResource >> TCmsTest::WalleCleanupTest [GOOD] >> TCmsTest::WalleRequestDuringRollingRestart >> TCmsTest::TestKeepAvailableModeScheduledDisconnects [GOOD] >> TCmsTest::TestLoadLog >> TKesusTest::TestQuoterAccountResourcesPaced [GOOD] >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingAnyKey [GOOD] Test command err: 2025-04-17T17:10:05.478775Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:05.478882Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:05.499353Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:05.499606Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:05.523891Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:05.524354Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=13442593046315283486, session=0, seqNo=0) 2025-04-17T17:10:05.524474Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:05.536310Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=13442593046315283486, session=1) 2025-04-17T17:10:05.537931Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[1:132:2158], cookie=6112574165196869237, session=2) 2025-04-17T17:10:05.538003Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[1:132:2158], cookie=6112574165196869237) 2025-04-17T17:10:05.538616Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[1:132:2158], cookie=1012325878830533964 2025-04-17T17:10:05.539291Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=5231012430235005134, session=1, seqNo=0) 2025-04-17T17:10:05.550713Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=5231012430235005134, session=1) 2025-04-17T17:10:05.550993Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:10:05.551104Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:10:05.551183Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:10:05.551306Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[1:132:2158], cookie=13279967909186888861, session=1) 2025-04-17T17:10:05.562656Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-17T17:10:05.562723Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-17T17:10:05.562774Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-04-17T17:10:05.576761Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=111) 2025-04-17T17:10:05.576838Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[1:132:2158], cookie=13279967909186888861) 2025-04-17T17:10:05.576900Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-17T17:10:06.134659Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:06.134751Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:06.155830Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:06.155915Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:06.191429Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:06.191870Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[2:132:2158], cookie=1134882212780066647, path="") 2025-04-17T17:10:06.204709Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[2:132:2158], cookie=1134882212780066647, status=SUCCESS) 2025-04-17T17:10:06.205288Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:141:2165], cookie=111, session=0, seqNo=0) 2025-04-17T17:10:06.205389Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:06.205516Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[2:141:2165], cookie=16268423928076994677, session=1) 2025-04-17T17:10:06.215810Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-17T17:10:06.215872Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-17T17:10:06.230944Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:141:2165], cookie=111, session=1) 2025-04-17T17:10:06.231018Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[2:141:2165], cookie=16268423928076994677) 2025-04-17T17:10:06.231063Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-17T17:10:06.699541Z node 3 :KESUS_TABLET INFO: 
OnActivateExecutor: 72057594037927937 2025-04-17T17:10:06.699619Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:06.714224Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:06.714315Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:06.727353Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:06.727710Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=13016131521286888219, session=0, seqNo=0) 2025-04-17T17:10:06.727819Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:06.752229Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=13016131521286888219, session=1) 2025-04-17T17:10:06.754461Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:130:2156], cookie=912441694682536157, session=1) 2025-04-17T17:10:06.754823Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-17T17:10:06.766908Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:130:2156], cookie=912441694682536157) 2025-04-17T17:10:06.769326Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:147:2171], cookie=17124199545844439003) 2025-04-17T17:10:06.769507Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:147:2171], cookie=17124199545844439003) 2025-04-17T17:10:06.771396Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:150:2174], cookie=1444462482685081998, session=0, seqNo=0) 2025-04-17T17:10:06.771628Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:10:06.789763Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:150:2174], cookie=1444462482685081998, session=2) 2025-04-17T17:10:06.791612Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:130:2156], cookie=14271660516084688037, session=2) 2025-04-17T17:10:06.791905Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 2025-04-17T17:10:06.808929Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:130:2156], cookie=14271660516084688037) 2025-04-17T17:10:08.375968Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:08.376051Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:08.394795Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:08.394894Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:08.412201Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:08.415657Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=12345, session=0, seqNo=0) 2025-04-17T17:10:08.416179Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:08.444386Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=12345, session=1) 2025-04-17T17:10:08.445837Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:136:2161], cookie=23456, session=1, seqNo=0) 2025-04-17T17:10:08.463572Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSessionAttach::Complete (sender=[4:136:2161], cookie=23456, session=1) 2025-04-17T17:10:09.593822Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:09.593917Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:09.620298Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:09.620623Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:09.650633Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:09.653000Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=12345, session=0, seqNo=0) 2025-04-17T17:10:09.653118Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:09.681416Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=12345, session=1) 2025-04-17T17:10:09.682212Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=23456, session=1, seqNo=0) 2025-04-17T17:10:09.700908Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=23456, session=1) >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD] >> TCmsTest::DisabledEvictVDisks [GOOD] >> TCmsTest::EmergencyDuringRollingRestart >> TCmsTest::RestartNodeInDownState [GOOD] >> TCmsTest::SamePriorityRequest >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled >> TKesusTest::TestSemaphoreSessionFailures [GOOD] >> TCmsTenatsTest::RequestRestartServices [GOOD] >> TCmsTest::ManageRequests [GOOD] >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TCmsTest::WalleTasks [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateClients [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateResources >> TCmsTest::WalleTasksWithNodeLimit >> TCmsTest::ScheduledWalleRequestDuringRollingRestart [GOOD] >> TCmsTest::SamePriorityRequest2 >> TKeyValueTest::TestIncorrectRequestThenResponseError [GOOD] >> TKeyValueTest::TestIncrementalKeySet >> TCmsTenatsTest::TestTenantLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled >> TCmsTest::TestForceRestartModeScheduled [GOOD] >> TCmsTest::TestForceRestartModeScheduledDisconnects >> TCmsTest::CheckUnreplicatedDiskPreventsRestart [GOOD] >> TCmsTest::AllVDisksEvictionInRack >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] >> TMaintenanceApiTest::LastRefreshTime [GOOD] |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TKesusTest::TestRegisterProxyLinkFailure [GOOD] >> TKesusTest::TestRegisterProxyLinkFailureRace |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD] |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::RequestRestartServices [GOOD] >> TCmsTest::RequestReplacePDiskDoesntBreakGroup [GOOD] >> TCmsTest::RequestReplacePDiskConsecutiveWithDone ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> 
TKesusTest::TestSemaphoreSessionFailures [GOOD] Test command err: 2025-04-17T17:10:05.546496Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:05.546605Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:05.560676Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:05.560936Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:05.589518Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:05.589844Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=11600747432385329514, session=0, seqNo=0) 2025-04-17T17:10:05.589958Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:05.601896Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=11600747432385329514, session=1) 2025-04-17T17:10:05.602105Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=6052167355851281081, session=0, seqNo=0) 2025-04-17T17:10:05.602177Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:10:05.614263Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=6052167355851281081, session=2) 2025-04-17T17:10:05.614580Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:132:2158], cookie=111, name="Lock1") 2025-04-17T17:10:05.628114Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:132:2158], cookie=111) 2025-04-17T17:10:05.628565Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:10:05.628950Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:10:05.629306Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:10:05.643880Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=222) 2025-04-17T17:10:05.644472Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:132:2158], cookie=333, name="Lock1") 2025-04-17T17:10:05.659285Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:132:2158], cookie=333) 2025-04-17T17:10:05.979335Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:05.979403Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:05.991090Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:05.991173Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:06.016737Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:06.017177Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=3013046298689057977, session=0, seqNo=0) 2025-04-17T17:10:06.017304Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:06.028889Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=3013046298689057977, session=1) 2025-04-17T17:10:06.029153Z node 2 :KESUS_TABLET 
DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=18008076141146835481, session=0, seqNo=0) 2025-04-17T17:10:06.029255Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:10:06.040840Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=18008076141146835481, session=2) 2025-04-17T17:10:06.041259Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:143:2167], cookie=8057572391575077999, name="Sem1", limit=1) 2025-04-17T17:10:06.041374Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-17T17:10:06.052992Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:143:2167], cookie=8057572391575077999) 2025-04-17T17:10:06.053297Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=111, session=1, semaphore="Sem1" count=1) 2025-04-17T17:10:06.053423Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-17T17:10:06.053563Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=222, session=2, semaphore="Sem1" count=1) 2025-04-17T17:10:06.065300Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=111) 2025-04-17T17:10:06.065383Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=222) 2025-04-17T17:10:06.065885Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:151:2175], cookie=7074653456167809123, name="Sem1") 2025-04-17T17:10:06.065975Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:151:2175], cookie=7074653456167809123) 2025-04-17T17:10:06.066398Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:154:2178], cookie=10366085055754898411, name="Sem1") 2025-04-17T17:10:06.066460Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:154:2178], cookie=10366085055754898411) 2025-04-17T17:10:06.066672Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:132:2158], cookie=333, name="Sem1") 2025-04-17T17:10:06.066777Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Sem1" waiter link 2025-04-17T17:10:06.078387Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:132:2158], cookie=333) 2025-04-17T17:10:06.078934Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:159:2183], cookie=17154498210289796771, name="Sem1") 2025-04-17T17:10:06.079000Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:159:2183], cookie=17154498210289796771) 2025-04-17T17:10:06.079297Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:162:2186], cookie=3655896183511331523, name="Sem1") 2025-04-17T17:10:06.079340Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:162:2186], cookie=3655896183511331523) 2025-04-17T17:10:06.079530Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:132:2158], cookie=444, name="Sem1") 2025-04-17T17:10:06.079594Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] 
Deleting session 1 / semaphore 1 "Sem1" owner link 2025-04-17T17:10:06.090968Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:132:2158], cookie=444) 2025-04-17T17:10:06.091371Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:167:2191], cookie=3807624032723188291, name="Sem1") 2025-04-17T17:10:06.091417Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:167:2191], cookie=3807624032723188291) 2025-04-17T17:10:06.091769Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:170:2194], cookie=10965383691343203200, name="Sem1") 2025-04-17T17:10:06.091810Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:170:2194], cookie=10965383691343203200) 2025-04-17T17:10:06.604794Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:06.604874Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:06.618082Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:06.618164Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:06.632358Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:06.632592Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:130:2156], cookie=4929593799899237107, name="Sem1", limit=1) 2025-04-17T17:10:06.632706Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-17T17:10:06.654366Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:130:2156], cookie=4929593799899237107) 2025-04-17T17:10:06.654739Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:139:2163], cookie=11631976439131518944, name="Sem2", limit=1) 2025-04-17T17:10:06.654839Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem2" 2025-04-17T17:10:06.666391Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:139:2163], cookie=11631976439131518944) 2025-04-17T17:10:06.666830Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:144:2168], cookie=711416545246598842, name="Sem1") 2025-04-17T17:10:06.666900Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:144:2168], cookie=711416545246598842) 2025-04-17T17:10:06.667258Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:147:2171], cookie=5489551934941047760, name="Sem2") 2025-04-17T17:10:06.667309Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:147:2171], cookie=5489551934941047760) 2025-04-17T17:10:06.677581Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:06.677774Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:06.678963Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:06.680305Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:06.716309Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:06.716599Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:187:2201], cookie=9363371175491879592, name="Sem1") 2025-04-17T17:10:06.716660Z node 3 :KESUS_TABLET 
DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:187:2201], cookie=9363371175491879592) 2025-04-17T17:10:06.717102Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:193:2206], cookie=10561723274254664872, name="Sem2") 2025-04-17T17:10:06.717152Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:193:2206], cookie=10561723274254664872) 2025-04-17T17:10:06.717499Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:196:2209], cookie=3358983814729552338, name="Sem1", limit=1) 2025-04-17T17:10:06.739808Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:196:2209], cookie=3358983814729552338) 2025-04-17T17:10:06.740352Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:201:2214], cookie=16211272519224246877, name="Sem2", limit=1) 2025-04-17T17:10:06.752328Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:201:2214], cookie=16211272519224246877) 2025-04-17T17:10:06.752796Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:206:2219], cookie=17143357902205533768, name="Sem1") 2025-04-17T17:10:06.752848Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:206:2219], cookie=171433 ... de 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 9 "Sem1" 2025-04-17T17:10:08.507534Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:222:2245], cookie=12391089472828740443) 2025-04-17T17:10:08.507902Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:227:2250], cookie=10045740964437213545, name="Sem1", force=0) 2025-04-17T17:10:08.507945Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 9 "Sem1" 2025-04-17T17:10:08.524491Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:227:2250], cookie=10045740964437213545) 2025-04-17T17:10:08.524984Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:232:2255], cookie=4754897332772845843, name="Sem1", limit=1) 2025-04-17T17:10:08.525070Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 10 "Sem1" 2025-04-17T17:10:08.541602Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:232:2255], cookie=4754897332772845843) 2025-04-17T17:10:08.542168Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:237:2260], cookie=350847942885455900, name="Sem1", force=0) 2025-04-17T17:10:08.542408Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 10 "Sem1" 2025-04-17T17:10:08.558071Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:237:2260], cookie=350847942885455900) 2025-04-17T17:10:08.559747Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:242:2265], cookie=9598183989015287642, name="Sem1", limit=1) 2025-04-17T17:10:08.560423Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 11 "Sem1" 2025-04-17T17:10:08.588101Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:242:2265], cookie=9598183989015287642) 2025-04-17T17:10:08.590440Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=111, session=1, 
semaphore="Sem1" count=1) 2025-04-17T17:10:08.590970Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #1 session 1 2025-04-17T17:10:08.611036Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=111) 2025-04-17T17:10:08.614687Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=222, session=2, semaphore="Sem1" count=1) 2025-04-17T17:10:08.634563Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=222) 2025-04-17T17:10:08.641986Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:130:2156], cookie=333, name="Sem1") 2025-04-17T17:10:08.642600Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 11 "Sem1" waiter link 2025-04-17T17:10:08.669931Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:130:2156], cookie=333) 2025-04-17T17:10:08.670365Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=444, session=2, semaphore="Sem1" count=1) 2025-04-17T17:10:08.687999Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=444) 2025-04-17T17:10:08.690577Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:130:2156], cookie=555, name="Sem1") 2025-04-17T17:10:08.690941Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 11 "Sem1" owner link 2025-04-17T17:10:08.691110Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #3 session 2 2025-04-17T17:10:08.704150Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:130:2156], cookie=555) 2025-04-17T17:10:10.155702Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:10.155789Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:10.174013Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:10.174521Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:10.200381Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:10.200753Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=14857887449159506477, session=0, seqNo=0) 2025-04-17T17:10:10.200877Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:10.221710Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=14857887449159506477, session=1) 2025-04-17T17:10:10.222753Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:132:2158], cookie=112, name="Sem1", limit=5) 2025-04-17T17:10:10.223664Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-17T17:10:10.241317Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:132:2158], cookie=112) 2025-04-17T17:10:10.242159Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:132:2158], cookie=113, name="Sem1") 2025-04-17T17:10:10.262157Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:132:2158], cookie=113) 2025-04-17T17:10:10.263048Z 
node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:132:2158], cookie=114, name="Sem1", force=0) 2025-04-17T17:10:10.263444Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 1 "Sem1" 2025-04-17T17:10:10.280774Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:132:2158], cookie=114) 2025-04-17T17:10:10.281028Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[5:132:2158], cookie=7632467942123956552 2025-04-17T17:10:10.281294Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:132:2158], cookie=115, name="Sem1", limit=5) 2025-04-17T17:10:10.301786Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:132:2158], cookie=115) 2025-04-17T17:10:10.302934Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:132:2158], cookie=116, name="Sem1") 2025-04-17T17:10:10.322068Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:132:2158], cookie=116) 2025-04-17T17:10:10.325226Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:132:2158], cookie=117, name="Sem1", force=0) 2025-04-17T17:10:10.342552Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:132:2158], cookie=117) 2025-04-17T17:10:10.343250Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=118, session=1, semaphore="Sem1" count=1) 2025-04-17T17:10:10.356294Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=118) 2025-04-17T17:10:10.356607Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:132:2158], cookie=119, name="Sem1") 2025-04-17T17:10:10.368896Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:132:2158], cookie=119) 2025-04-17T17:10:10.369190Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:132:2158], cookie=120, name="Sem1") 2025-04-17T17:10:10.369259Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:132:2158], cookie=120) 2025-04-17T17:10:10.369442Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[5:132:2158], cookie=9717740184341711429, session=1) 2025-04-17T17:10:10.369528Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-17T17:10:10.384731Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[5:132:2158], cookie=9717740184341711429) 2025-04-17T17:10:10.385023Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:132:2158], cookie=121, name="Sem1", limit=5) 2025-04-17T17:10:10.399506Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:132:2158], cookie=121) 2025-04-17T17:10:10.400778Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:132:2158], cookie=122, name="Sem1") 2025-04-17T17:10:10.426397Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:132:2158], cookie=122) 2025-04-17T17:10:10.427113Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:132:2158], cookie=123, name="Sem1", force=0) 2025-04-17T17:10:10.450686Z node 5 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:132:2158], cookie=123) 2025-04-17T17:10:10.451724Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=124, session=1, semaphore="Sem1" count=1) 2025-04-17T17:10:10.473980Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=124) 2025-04-17T17:10:10.475198Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:132:2158], cookie=125, name="Sem1") 2025-04-17T17:10:10.501000Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:132:2158], cookie=125) 2025-04-17T17:10:10.501680Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:132:2158], cookie=126, name="Sem1") 2025-04-17T17:10:10.501876Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:132:2158], cookie=126) 2025-04-17T17:10:10.503315Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:132:2158], cookie=127, name="Sem1", limit=5) 2025-04-17T17:10:10.503593Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:132:2158], cookie=127) 2025-04-17T17:10:10.504838Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:132:2158], cookie=128, name="Sem1") 2025-04-17T17:10:10.505181Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:132:2158], cookie=128) 2025-04-17T17:10:10.505897Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:132:2158], cookie=129, name="Sem1", force=0) 2025-04-17T17:10:10.506365Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:132:2158], cookie=129) 2025-04-17T17:10:10.507184Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=130, session=1, semaphore="Sem1" count=1) 2025-04-17T17:10:10.507585Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=130) 2025-04-17T17:10:10.508827Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:132:2158], cookie=131, name="Sem1") 2025-04-17T17:10:10.509176Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:132:2158], cookie=131) 2025-04-17T17:10:10.510163Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:132:2158], cookie=132, name="Sem1") 2025-04-17T17:10:10.510358Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:132:2158], cookie=132) >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled [GOOD] >> TCmsTest::ActionIssue |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TCmsTest::Notifications [GOOD] >> TCmsTest::Mirror3dcPermissions >> TCmsTenatsTest::TestLimitsWithDownNode [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc [GOOD] >> TCmsTest::VDisksEviction >> TCmsTest::RequestRestartServicesPartial [GOOD] >> TCmsTest::RequestRestartServicesNoUser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] Test command err: 2025-04-17T17:10:05.979239Z node 1 :KESUS_TABLET INFO: 
OnActivateExecutor: 72057594037927937 2025-04-17T17:10:05.979341Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:05.992713Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:05.992963Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:06.017154Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:06.022856Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:132:2158], cookie=11451564548750850608, path="/Root", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-04-17T17:10:06.023096Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:10:06.034918Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:132:2158], cookie=11451564548750850608) 2025-04-17T17:10:06.035448Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:141:2165], cookie=10255796140792128421, path="/Root/Folder", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-04-17T17:10:06.035602Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Folder" 2025-04-17T17:10:06.047452Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:141:2165], cookie=10255796140792128421) 2025-04-17T17:10:06.047984Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:146:2170], cookie=11822406990111572581, path="/Root/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-04-17T17:10:06.048183Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Q1" 2025-04-17T17:10:06.059870Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:146:2170], cookie=11822406990111572581) 2025-04-17T17:10:06.060452Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:151:2175], cookie=9360060677402423679, path="/Root/Folder/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-04-17T17:10:06.060671Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2025-04-17T17:10:06.072407Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:151:2175], cookie=9360060677402423679) 2025-04-17T17:10:06.072891Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:156:2180], cookie=5547623926425367186, path="/Root/Folder/Q2", config={ MaxUnitsPerSecond: 10 }) 2025-04-17T17:10:06.073074Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 5 "Root/Folder/Q2" 2025-04-17T17:10:06.084542Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:156:2180], cookie=5547623926425367186) 2025-04-17T17:10:06.085059Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:161:2185], cookie=9369168996161974450, path="/Root/Folder/Q3", config={ MaxUnitsPerSecond: 10 }) 2025-04-17T17:10:06.085303Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 6 "Root/Folder/Q3" 2025-04-17T17:10:06.096786Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:161:2185], cookie=9369168996161974450) 2025-04-17T17:10:06.097227Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:166:2190], 
cookie=11596641070593486524, path="/Root2", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-04-17T17:10:06.097345Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 7 "Root2" 2025-04-17T17:10:06.114087Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:166:2190], cookie=11596641070593486524) 2025-04-17T17:10:06.116004Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:171:2195], cookie=10821804232522018845, path="/Root2/Q", config={ MaxUnitsPerSecond: 10 }) 2025-04-17T17:10:06.116771Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 8 "Root2/Q" 2025-04-17T17:10:06.135527Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:171:2195], cookie=10821804232522018845) 2025-04-17T17:10:06.137447Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:176:2200], cookie=4624199642883724843, ids=[100], paths=[], recursive=0) 2025-04-17T17:10:06.137866Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:176:2200], cookie=4624199642883724843) 2025-04-17T17:10:06.139977Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:179:2203], cookie=10506607088095709265, ids=[], paths=[Nonexistent/Path], recursive=0) 2025-04-17T17:10:06.140291Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:179:2203], cookie=10506607088095709265) 2025-04-17T17:10:06.142785Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:182:2206], cookie=10797066488645879721, ids=[], paths=[/Root, ], recursive=0) 2025-04-17T17:10:06.143719Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:182:2206], cookie=10797066488645879721) 2025-04-17T17:10:06.147146Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:185:2209], cookie=2251236401020341539, ids=[1, 1], paths=[], recursive=0) 2025-04-17T17:10:06.147312Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:185:2209], cookie=2251236401020341539) 2025-04-17T17:10:06.149441Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:188:2212], cookie=13644263433095526796, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2025-04-17T17:10:06.149684Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:188:2212], cookie=13644263433095526796) 2025-04-17T17:10:06.151454Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:191:2215], cookie=7339289134402161615, ids=[], paths=[], recursive=1) 2025-04-17T17:10:06.151632Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:191:2215], cookie=7339289134402161615) 2025-04-17T17:10:06.153143Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:194:2218], cookie=14475000763640039764, ids=[], paths=[], recursive=0) 2025-04-17T17:10:06.153219Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:194:2218], cookie=14475000763640039764) 2025-04-17T17:10:06.154488Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:197:2221], 
cookie=14095220403772213403, ids=[3, 2], paths=[], recursive=1) 2025-04-17T17:10:06.154625Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:197:2221], cookie=14095220403772213403) 2025-04-17T17:10:06.156117Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:200:2224], cookie=7352798859720460190, ids=[3, 2], paths=[], recursive=0) 2025-04-17T17:10:06.156292Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:200:2224], cookie=7352798859720460190) 2025-04-17T17:10:06.158497Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:203:2227], cookie=14688830347399515456, ids=[], paths=[Root2/], recursive=1) 2025-04-17T17:10:06.158856Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:203:2227], cookie=14688830347399515456) 2025-04-17T17:10:06.160778Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:206:2230], cookie=7637810254161310878, ids=[], paths=[Root2/], recursive=0) 2025-04-17T17:10:06.160909Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:206:2230], cookie=7637810254161310878) 2025-04-17T17:10:06.179039Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:06.179124Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:06.179586Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:06.179990Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:06.225020Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:06.225334Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:246:2260], cookie=11386876580760917257, ids=[100], paths=[], recursive=0) 2025-04-17T17:10:06.225402Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:246:2260], cookie=11386876580760917257) 2025-04-17T17:10:06.226050Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:252:2265], cookie=6381480930065463842, ids=[], paths=[Nonexistent/Path], recursive=0) 2025-04-17T17:10:06.226124Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:252:2265], cookie=6381480930065463842) 2025-04-17T17:10:06.226839Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:255:2268], cookie=7678357763375965203, ids=[], paths=[/Root, ], recursive=0) 2025-04-17T17:10:06.226936Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:255:2268], cookie=7678357763375965203) 2025-04-17T17:10:06.227596Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:258:2271], cookie=7334841201498622769, ids=[1, 1], paths=[], recursive=0) 2025-04-17T17:10:06.227679Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:258:2271], cookie=7334841201498622769) 2025-04-17T17:10:06.228395Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:261:2274], cookie=2340986913890737034, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2025-04-17T17:10:06.228488Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxQuoterResourceDescribe::Complete (sender=[1:261:2274], cookie=2340986913890737034) 2025-04-17T17:10:06.229180Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:264:2277], cookie=4253486903331824510, ids=[], paths=[], recursive=1) 2025-04-17T17:10:06.229253Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:264:2277], cookie=4253486903331824510) 2025-04-17T17:10:06.229970Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:267:2280], cookie=2657562836495613433, ids=[], paths=[], recursive=0) 2025-04-17T17:10:06.230027Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:267:2280], cookie=2657562836495613433) 2025-04-17T17:10:06.230669Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:270:2283], cookie=2482947307653068320, ids=[3, 2], paths=[], recursive=1) 2025-04-17T17:10:06.230740Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:270:2283], cookie=2482947307653068320) 2025-04-17T17:10:06.231387Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:273:2286], cookie=6793224901578128816, ids=[3, 2], paths=[], recursive= ... :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:09.964275Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:09.964614Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=9835167298196365076, path="/Root", config={ MaxUnitsPerSecond: 1 }) 2025-04-17T17:10:09.964802Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:10:09.994144Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], cookie=9835167298196365076) 2025-04-17T17:10:09.994562Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:139:2163], cookie=7136150039363002425, path="/Root/Q", config={ }) 2025-04-17T17:10:09.994697Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Q" 2025-04-17T17:10:10.011987Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:139:2163], cookie=7136150039363002425) 2025-04-17T17:10:10.012494Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:144:2168], cookie=11241893618589805504, path="/Root/Folder", config={ }) 2025-04-17T17:10:10.012678Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Folder" 2025-04-17T17:10:10.026858Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:144:2168], cookie=11241893618589805504) 2025-04-17T17:10:10.028681Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:149:2173], cookie=11646302204692139674, path="/Root/Folder/Q1", config={ }) 2025-04-17T17:10:10.029552Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2025-04-17T17:10:10.052828Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:149:2173], cookie=11646302204692139674) 2025-04-17T17:10:10.054377Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:154:2178], cookie=800521198651028309, ids=[], paths=[], 
recursive=1) 2025-04-17T17:10:10.054768Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:154:2178], cookie=800521198651028309) 2025-04-17T17:10:10.061573Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:160:2184], cookie=9289793641489268951, ids=[], paths=[], recursive=1) 2025-04-17T17:10:10.061823Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:160:2184], cookie=9289793641489268951) 2025-04-17T17:10:10.068600Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:166:2190], cookie=9757873234240443716, ids=[], paths=[], recursive=1) 2025-04-17T17:10:10.068713Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:166:2190], cookie=9757873234240443716) 2025-04-17T17:10:10.069148Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:169:2193], cookie=226330413536783604, id=0, path="/Root/Folder/NonexistingRes") 2025-04-17T17:10:10.069216Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:169:2193], cookie=226330413536783604) 2025-04-17T17:10:10.069589Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:172:2196], cookie=8314817674129641755, ids=[], paths=[], recursive=1) 2025-04-17T17:10:10.069646Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:172:2196], cookie=8314817674129641755) 2025-04-17T17:10:10.070033Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:175:2199], cookie=5527104303757022842, id=100, path="") 2025-04-17T17:10:10.070082Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:175:2199], cookie=5527104303757022842) 2025-04-17T17:10:10.070444Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:178:2202], cookie=9635281877389159238, ids=[], paths=[], recursive=1) 2025-04-17T17:10:10.070493Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:178:2202], cookie=9635281877389159238) 2025-04-17T17:10:10.070870Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:181:2205], cookie=4388877827758021895, id=3, path="") 2025-04-17T17:10:10.070919Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:181:2205], cookie=4388877827758021895) 2025-04-17T17:10:10.071287Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:184:2208], cookie=1990929042379184920, ids=[], paths=[], recursive=1) 2025-04-17T17:10:10.071341Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:184:2208], cookie=1990929042379184920) 2025-04-17T17:10:10.071739Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:187:2211], cookie=14056432241339902482, id=0, path="/Root/Folder/Q1") 2025-04-17T17:10:10.071895Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleted quoter resource 4 "Root/Folder/Q1" 2025-04-17T17:10:10.089237Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:187:2211], cookie=14056432241339902482) 2025-04-17T17:10:10.089918Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute 
(sender=[4:192:2216], cookie=17161799018469328862, ids=[], paths=[], recursive=1) 2025-04-17T17:10:10.090004Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:192:2216], cookie=17161799018469328862) 2025-04-17T17:10:10.101607Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:10.101700Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:10.102225Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:10.102800Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:10.152813Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:10.153152Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:232:2246], cookie=12151485007605708073, ids=[], paths=[], recursive=1) 2025-04-17T17:10:10.153237Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:232:2246], cookie=12151485007605708073) 2025-04-17T17:10:10.153738Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:238:2251], cookie=3197598723429516653, id=3, path="") 2025-04-17T17:10:10.153831Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleted quoter resource 3 "Root/Folder" 2025-04-17T17:10:10.165912Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:238:2251], cookie=3197598723429516653) 2025-04-17T17:10:10.166631Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:243:2256], cookie=3406529632504609152, ids=[], paths=[], recursive=1) 2025-04-17T17:10:10.166711Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:243:2256], cookie=3406529632504609152) 2025-04-17T17:10:10.177455Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:10.177539Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:10.178071Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:10.178533Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:10.231548Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:10.231915Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:283:2286], cookie=9934283379982002473, ids=[], paths=[], recursive=1) 2025-04-17T17:10:10.231999Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:283:2286], cookie=9934283379982002473) 2025-04-17T17:10:11.105643Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:11.105747Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:11.127196Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:11.127682Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:11.156901Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:11.157324Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=6533945535868135728, path="/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-04-17T17:10:11.157522Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Q1" 2025-04-17T17:10:11.172841Z node 5 
:KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=6533945535868135728) 2025-04-17T17:10:11.173464Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:141:2165], cookie=2957186404515110987, path="/Q2", config={ MaxUnitsPerSecond: 10 }) 2025-04-17T17:10:11.173646Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Q2" 2025-04-17T17:10:11.186471Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:141:2165], cookie=2957186404515110987) 2025-04-17T17:10:11.188175Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:146:2170]. Cookie: 11625735012280239693. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:10:11.188252Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:146:2170], cookie=11625735012280239693) 2025-04-17T17:10:11.189074Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:146:2170]. Cookie: 14052142180252472312. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Q2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { Error { Status: NOT_FOUND Issues { message: "Resource \"/Q3\" doesn\'t exist." 
} } } ProtocolVersion: 1 } 2025-04-17T17:10:11.189133Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:146:2170], cookie=14052142180252472312) >> TCmsTest::TestLoadLog [GOOD] >> TCmsTest::TestLogOperationsRollback >> TCmsTenatsTest::RequestShutdownHost [GOOD] >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy >> TCmsTest::RequestReplaceDevicePDiskByPath [GOOD] >> TCmsTest::RequestReplaceManyDevicesOnOneNode |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::LastRefreshTime [GOOD] >> TCmsTest::TestProcessingQueue [GOOD] >> TCmsTest::SysTabletsNode [GOOD] >> TCmsTest::RacyStartCollecting [GOOD] >> TCmsTest::PriorityRange >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] >> TCmsTest::SamePriorityRequest [GOOD] >> TCmsTest::StateStorageAvailabilityMode [GOOD] >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient [GOOD] >> TKesusTest::TestQuoterAccountResourcesForgetClient ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestProcessingQueue [GOOD] Test command err: 2025-04-17T17:10:05.416937Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-04-17T17:10:05.538341Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-04-17T17:10:05.553636Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-04-17T17:10:05.598601Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-04-17T17:10:11.311580Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2025-04-17T17:10:11.311756Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2025-04-17T17:10:11.311777Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2025-04-17T17:10:11.311794Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2025-04-17T17:10:11.311812Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2025-04-17T17:10:11.311829Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2025-04-17T17:10:11.311845Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2025-04-17T17:10:11.311863Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 >> TCmsTest::TestForceRestartModeScheduledDisconnects [GOOD] >> TDataShardTrace::TestTraceDistributedUpsert-UseSink |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SysTabletsNode [GOOD] >> TCmsTest::EmergencyDuringRollingRestart [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateResources [GOOD] >> TKesusTest::TestQuoterAccountLabels |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] >> TDataShardTrace::TestTraceWriteImmediateOnShard >> TDataShardTrace::TestTraceDistributedSelectViaReadActors >> TCmsTest::StateStorageRollingRestart [GOOD] >> TCmsTest::StateStorageLockedNodes |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageAvailabilityMode [GOOD] |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest [GOOD] >> TCmsTest::SamePriorityRequest2 [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] |73.8%| [TM] {asan, default-linux-x86_64, 
release} ydb/core/cms/ut/unittest >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] >> TCmsTest::AllVDisksEvictionInRack [GOOD] |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::EmergencyDuringRollingRestart [GOOD] Test command err: 2025-04-17T17:10:08.238688Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } } } 2025-04-17T17:10:08.240898Z node 10 :CMS DEBUG: TTxUpdateConfig Execute 2025-04-17T17:10:08.290359Z node 10 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-17T17:10:08.290530Z node 10 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-04-17T17:10:08.292490Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-17-17" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 17 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 10 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 11 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-2-0" State: 
UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 12 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 13 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 14 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 15 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 16 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Timestamp: 120027512 } } 2025-04-17T17:10:08.293079Z node 10 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-17-17" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 17 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-0-0" State: 
UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 10 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 11 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 12 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 13 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 14 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 15 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: 
"vdisk-2-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 16 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Timestamp: 120027512 } 2025-04-17T17:10:08.293595Z node 10 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.002512s 2025-04-17T17:10:08.293794Z node 10 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-17T17:10:08.294024Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-04-17T17:10:08.294208Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-04-17T17:10:08.294400Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-04-17T17:10:08.294503Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-04-17T17:10:08.294534Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-04-17T17:10:08.294562Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-04-17T17:10:08.294587Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-04-17T17:10:08.294616Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:83883506 ... :CMS DEBUG: Running CleanupWalleTasks 2025-04-17T17:10:08.711222Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-04-17T17:10:08.711523Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-04-17T17:10:08.711844Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-04-17T17:10:08.712266Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-04-17T17:10:08.712389Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-04-17T17:10:08.712531Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-04-17T17:10:08.712555Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-04-17T17:10:08.712578Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:8388350642965737326:1634689637] 2025-04-17T17:10:08.713748Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-04-17T17:10:08.715809Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 
State: Normal } ResponseTime: 240027
2025-04-17T17:10:08.716681Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027
2025-04-17T17:10:08.717204Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027
2025-04-17T17:10:08.717274Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027
2025-04-17T17:10:08.717499Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 17, response# PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/17/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027
2025-04-17T17:10:08.717887Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027
2025-04-17T17:10:08.717940Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027
2025-04-17T17:10:08.717983Z node 10 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s
2025-04-17T17:10:08.719129Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 10:10, status# FAULTY, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 3 StateLimit# 1, dry run# 0
2025-04-17T17:10:08.719664Z node 10 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 1
2025-04-17T17:10:08.720588Z node 10 :CMS DEBUG: TTxLogAndSend Execute
2025-04-17T17:10:08.721349Z node 10 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 2
2025-04-17T17:10:08.721678Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:10
2025-04-17T17:10:08.741832Z node 10 :CMS DEBUG: TTxLogAndSend Complete
2025-04-17T17:10:08.790682Z node 10 :CMS DEBUG: TTxUpdateDowntimes Execute
2025-04-17T17:10:08.790899Z node 10 :CMS DEBUG: TTxUpdateDowntimes Complete
2025-04-17T17:10:08.791245Z node 10 :CMS DEBUG: Timestamp: 1970-01-01T00:04:00Z
2025-04-17T17:10:08.794740Z node 10 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true
2025-04-17T17:10:08.795470Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" }
2025-04-17T17:10:08.795818Z node 10 :CMS DEBUG: Result: ERROR (reason: Evict vdisks is disabled in Sentinel (self heal))
2025-04-17T17:10:08.796367Z node 10 :CMS DEBUG: TTxStorePermissions Execute
2025-04-17T17:10:08.797223Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true
2025-04-17T17:10:08.810842Z node 10 :CMS DEBUG: TTxStorePermissions complete
2025-04-17T17:10:08.811026Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ERROR Reason: "Evict vdisks is disabled in Sentinel (self heal)" } RequestId: "user-r-1" }
2025-04-17T17:10:08.811731Z node 10 :CMS DEBUG: TTxUpdateConfig Execute
2025-04-17T17:10:08.837219Z node 10 :CMS DEBUG: TTxUpdateConfig Complete
2025-04-17T17:10:08.838174Z node 10 :CMS DEBUG: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 1 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 }
2025-04-17T17:10:08.909781Z node 10 :CMS DEBUG: [Sentinel] [Main] UpdateState
2025-04-17T17:10:08.909835Z node 10 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater
2025-04-17T17:10:08.910006Z node 10 :CMS DEBUG: Running CleanupWalleTasks
2025-04-17T17:10:08.911541Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637]
2025-04-17T17:10:08.911877Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637]
2025-04-17T17:10:08.911908Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637]
2025-04-17T17:10:08.912091Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637]
2025-04-17T17:10:08.912277Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637]
2025-04-17T17:10:08.912305Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637]
2025-04-17T17:10:08.912346Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637]
2025-04-17T17:10:08.912374Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:8388350642965737326:1634689637]
2025-04-17T17:10:08.912625Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027
2025-04-17T17:10:08.913250Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027
2025-04-17T17:10:08.913358Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027
2025-04-17T17:10:08.913477Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027
2025-04-17T17:10:08.913541Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027
2025-04-17T17:10:08.913608Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027
2025-04-17T17:10:08.913661Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 17, response# PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/17/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027
2025-04-17T17:10:08.913709Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027
2025-04-17T17:10:08.913745Z node 10 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s
2025-04-17T17:10:08.913957Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 10:10, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0
2025-04-17T17:10:08.914029Z node 10 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 1
2025-04-17T17:10:08.914192Z node 10 :CMS DEBUG: TTxLogAndSend Execute
2025-04-17T17:10:08.914377Z node 10 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 3
2025-04-17T17:10:08.914427Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:10
|73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD]
Test command err: 2025-04-17T17:10:07.453017Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-17T17:10:07.453134Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-17T17:10:07.473948Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-17T17:10:07.474224Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-17T17:10:07.509106Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-17T17:10:09.035164Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-17T17:10:09.035255Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-17T17:10:09.082076Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-17T17:10:09.082671Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-17T17:10:09.157353Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-17T17:10:09.910846Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-17T17:10:09.910950Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-17T17:10:09.929051Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-17T17:10:09.929156Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-17T17:10:09.956759Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-17T17:10:11.040529Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-17T17:10:11.040632Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-17T17:10:11.062785Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-17T17:10:11.063003Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-17T17:10:11.098560Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-17T17:10:11.100131Z node 4 :PIPE_SERVER ERROR: [72057594037927937] NodeDisconnected NodeId# 5
2025-04-17T17:10:11.100680Z node 4 :KESUS_TABLET TRACE: Got TEvServerDisconnected([4:187:2159])
2025-04-17T17:10:12.532239Z node 6 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-17T17:10:12.532327Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-17T17:10:12.565126Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-17T17:10:12.565450Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
... waiting for register request
2025-04-17T17:10:12.598929Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR
... waiting for register request (done)
...
unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR 2025-04-17T17:10:12.601212Z node 6 :PIPE_SERVER ERROR: [72057594037927937] NodeDisconnected NodeId# 7 2025-04-17T17:10:12.601624Z node 6 :KESUS_TABLET TRACE: Got TEvServerDisconnected([6:187:2159]) |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestForceRestartModeScheduledDisconnects [GOOD] >> TDataShardTrace::TestTraceDistributedSelect >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk >> TDataShardTrace::TestTraceDistributedUpsert+UseSink >> TCmsTest::RequestReplacePDiskConsecutiveWithDone [GOOD] >> KeyValueReadStorage::ReadError [GOOD] >> KeyValueReadStorage::ReadErrorWithWrongGroupId [GOOD] >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks >> TRtmrTest::CreateWithoutTimeCastBuckets |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TCmsTest::RequestRestartServicesNoUser [GOOD] |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::AllVDisksEvictionInRack [GOOD] Test command err: 2025-04-17T17:10:12.212145Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-17T17:10:12.212225Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-04-17T17:10:12.212335Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-04-17T17:10:12.214027Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 25 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 26 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 27 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-3-0" 
State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 28 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 29 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 30 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 31 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 32 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120028000 } } 2025-04-17T17:10:12.214655Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 25 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 
120028000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 26 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 27 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 28 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 29 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 30 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 31 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 32 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120028000 } 2025-04-17T17:10:12.214904Z node 25 :CMS DEBUG: [Sentinel] [Main] Config 
was updated in 120.003000s
2025-04-17T17:10:12.214950Z node 25 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater
2025-04-17T17:10:12.215099Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true
2025-04-17T17:10:12.215167Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000
2025-04-17T17:10:12.215219Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 25 has not yet been completed)
2025-04-17T17:10:12.215347Z node 25 :CMS DEBUG: TTxStorePermissions Execute
2025-04-17T17:10:12.215548Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true
2025-04-17T17:10:12.215595Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 25, marker# MARKER_DISK_FAULTY
2025-04-17T17:10:12.215826Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 25, wbId# [25:8388350642965737326:1634689637]
2025-04-17T17:10:12.215874Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 26, wbId# [26:8388350642965737326:1634689637]
2025-04-17T17:10:12.215901Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 27, wbId# [27:8388350642965737326:1634689637]
2025-04-17T17:10:12.215926Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326:1634689637]
2025-04-17T17:10:12.215949Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637]
2025-04-17T17:10:12.215974Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637]
2025-04-17T17:10:12.215998Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637]
2025-04-17T17:10:12.216042Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637]
2025-04-17T17:10:12.228005Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: ... [28:8388350642965737326:1634689637]
2025-04-17T17:10:12.544592Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637]
2025-04-17T17:10:12.544619Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637]
2025-04-17T17:10:12.544672Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637]
2025-04-17T17:10:12.544701Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637]
2025-04-17T17:10:12.544913Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/25/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028
2025-04-17T17:10:12.545558Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028
2025-04-17T17:10:12.545656Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028
2025-04-17T17:10:12.545716Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/29/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028
2025-04-17T17:10:12.545776Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/30/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028
2025-04-17T17:10:12.545839Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/31/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028
2025-04-17T17:10:12.545905Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/32/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028
2025-04-17T17:10:12.545960Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028
2025-04-17T17:10:12.546038Z node 25 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s
2025-04-17T17:10:12.546206Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 26:26, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0
2025-04-17T17:10:12.546270Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 25:25, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0
2025-04-17T17:10:12.546324Z node 25 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 2
2025-04-17T17:10:12.546489Z node 25 :CMS DEBUG: TTxLogAndSend Execute
2025-04-17T17:10:12.546677Z node 25 :CMS DEBUG: TTxLogAndSend Execute
2025-04-17T17:10:12.546788Z node 25 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Success: true, cookie# 1
2025-04-17T17:10:12.546835Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 25:25
2025-04-17T17:10:12.546866Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 26:26
2025-04-17T17:10:12.564616Z node 25 :CMS DEBUG: TTxLogAndSend Complete
2025-04-17T17:10:12.564696Z node 25 :CMS DEBUG: TTxLogAndSend Complete
2025-04-17T17:10:12.587279Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute
2025-04-17T17:10:12.587529Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete
2025-04-17T17:10:12.587760Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z
2025-04-17T17:10:12.589207Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true
2025-04-17T17:10:12.589932Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" }
2025-04-17T17:10:12.590283Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 25, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0
2025-04-17T17:10:12.590643Z node 25 :CMS DEBUG: Ring: 0; State: Ok
2025-04-17T17:10:12.590664Z node 25 :CMS DEBUG: Ring: 1; State: Ok
2025-04-17T17:10:12.590679Z node 25 :CMS DEBUG: Ring: 2; State: Ok
2025-04-17T17:10:12.590877Z node 25 :CMS DEBUG: Result: ALLOW
2025-04-17T17:10:12.591652Z node 25 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user
2025-04-17T17:10:12.592180Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:13:00Z)
2025-04-17T17:10:12.592625Z node 25 :CMS DEBUG: TTxStorePermissions Execute
2025-04-17T17:10:12.593653Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:13:00.128000Z, action# Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000
2025-04-17T17:10:12.594054Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true
2025-04-17T17:10:12.612381Z node 25 :CMS DEBUG: TTxStorePermissions complete
2025-04-17T17:10:12.613505Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 } Deadline: 780128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 25 InterconnectPort: 12001 } } } }
2025-04-17T17:10:12.613721Z node 25 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:33:00.128000Z
2025-04-17T17:10:12.631955Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:13:00Z)
2025-04-17T17:10:12.632626Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute
2025-04-17T17:10:12.632693Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete
2025-04-17T17:10:12.632743Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z
2025-04-17T17:10:12.634406Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true
2025-04-17T17:10:12.635220Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" }
2025-04-17T17:10:12.635776Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0
2025-04-17T17:10:12.635977Z node 25 :CMS DEBUG: Result: ALLOW
2025-04-17T17:10:12.637118Z node 25 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user
2025-04-17T17:10:12.637517Z node 25 :CMS INFO: Adding lock for Host ::1:12002 (26) (permission user-p-2 until 1970-01-01T00:13:00Z)
2025-04-17T17:10:12.637949Z node 25 :CMS DEBUG: TTxStorePermissions Execute
2025-04-17T17:10:12.638812Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:13:00.229512Z, action# Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000
2025-04-17T17:10:12.639491Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true
2025-04-17T17:10:12.665772Z node 25 :CMS DEBUG: TTxStorePermissions complete
2025-04-17T17:10:12.667063Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } Deadline: 780229512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } }
2025-04-17T17:10:12.670640Z node 25 :CMS INFO: User user is done with permissions user-p-1
2025-04-17T17:10:12.671012Z node 25 :CMS DEBUG: Resulting status: OK
2025-04-17T17:10:12.671209Z node 25 :CMS DEBUG: TTxRemovePermissions Execute
2025-04-17T17:10:12.671571Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 25
2025-04-17T17:10:12.672291Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, reason# permission user-p-1 was removed
2025-04-17T17:10:12.672626Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove
2025-04-17T17:10:12.694300Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2025-04-17T17:10:12.695314Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-04-17T17:10:12.696478Z node 25 :CMS INFO: User user is done with permissions user-p-2 2025-04-17T17:10:12.696523Z node 25 :CMS DEBUG: Resulting status: OK 2025-04-17T17:10:12.696576Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2025-04-17T17:10:12.696661Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 26 2025-04-17T17:10:12.696748Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-2, reason# permission user-p-2 was removed 2025-04-17T17:10:12.696792Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2025-04-17T17:10:12.719710Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2025-04-17T17:10:12.720374Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } >> TMaintenanceApiTest::RequestReplaceDevicePDisk [GOOD] |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest2 [GOOD] >> TKeyValueTest::TestIncrementalKeySet [GOOD] >> TKeyValueTest::TestGetStatusWorksNewApi >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi >> TCmsTest::VDisksEviction [GOOD] |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplacePDiskConsecutiveWithDone [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] Test command err: 2025-04-17T17:10:15.356306Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# ERROR ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-17T17:10:15.356448Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV316@keyvalue_storage_read_request.cpp:270} Unexpected EvGetResult. KeyValue# 1 Status# ERROR Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1744909815355 ErrorReason# 2025-04-17T17:10:15.365009Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 2 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-17T17:10:15.365102Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV318@keyvalue_storage_read_request.cpp:240} Received EvGetResult from an unexpected storage group. KeyValue# 1 GroupId# 2 ExpecetedGroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1744909815364 ErrorReason# 2025-04-17T17:10:15.371121Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-17T17:10:15.371199Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV319@keyvalue_storage_read_request.cpp:222} Received EvGetResult with an unexpected cookie. 
KeyValue# 1 Cookie# 1000 SentGets# 1 GroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 GotAt# 1744909815370 ErrorReason# >> TCmsTest::PriorityRange [GOOD] |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestRestartServicesNoUser [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents [GOOD] >> TKeyValueTest::TestWriteLongKey >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::RequestReplaceDevicePDisk [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorks |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] >> TKesusTest::TestQuoterAccountLabels [GOOD] >> TKesusTest::TestPassesUpdatedPropsToSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEviction [GOOD] Test command err: 2025-04-17T17:10:14.184949Z node 18 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-17T17:10:14.185046Z node 18 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-04-17T17:10:14.185227Z node 18 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-04-17T17:10:14.187352Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP 
Timestamp: 120026512 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120026512 } } 2025-04-17T17:10:14.189076Z node 18 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP 
Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: 
"pdisk-25-25" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120026512 } 2025-04-17T17:10:14.189371Z node 18 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.002512s 2025-04-17T17:10:14.189425Z node 18 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-17T17:10:14.189621Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-04-17T17:10:14.189702Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 2025-04-17T17:10:14.189763Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 18 has not yet been completed) 2025-04-17T17:10:14.189916Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2025-04-17T17:10:14.190157Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 18 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-17T17:10:14.190212Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 18, marker# MARKER_DISK_FAULTY 2025-04-17T17:10:14.190490Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2025-04-17T17:10:14.190542Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2025-04-17T17:10:14.190575Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2025-04-17T17:10:14.190618Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637] 2025-04-17T17:10:14.190662Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637] 2025-04-1 ... 
mp: 120539560 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120539560 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120539560 } Timestamp: 120539560 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120539560 } } 2025-04-17T17:10:14.619915Z node 18 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120539560 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120539560 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120539560 } Timestamp: 120539560 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120539560 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120539560 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120539560 } Timestamp: 120539560 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120539560 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120539560 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120539560 } Timestamp: 120539560 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120539560 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120539560 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120539560 } Timestamp: 120539560 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120539560 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120539560 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120539560 } Timestamp: 120539560 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" 
Timestamp: 120539560 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120539560 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120539560 } Timestamp: 120539560 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120539560 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120539560 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120539560 } Timestamp: 120539560 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120539560 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120539560 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120539560 } Timestamp: 120539560 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120539560 } 2025-04-17T17:10:14.620255Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-04-17T17:10:14.620334Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 2025-04-17T17:10:14.620411Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 18 has not yet been completed) 2025-04-17T17:10:14.620564Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2025-04-17T17:10:14.620787Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-3, owner# user, order# 3, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 18 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-17T17:10:14.620838Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 18, marker# MARKER_DISK_FAULTY 2025-04-17T17:10:14.621074Z node 18 :CMS DEBUG: [Sentinel] [Main] Config was updated in 0.100000s 2025-04-17T17:10:14.621130Z node 18 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-17T17:10:14.621244Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2025-04-17T17:10:14.621294Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2025-04-17T17:10:14.621323Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2025-04-17T17:10:14.621364Z node 18 :CMS DEBUG: [Sentinel] 
[StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637] 2025-04-17T17:10:14.621396Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637] 2025-04-17T17:10:14.621433Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 23, wbId# [23:8388350642965737326:1634689637] 2025-04-17T17:10:14.621467Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 24, wbId# [24:8388350642965737326:1634689637] 2025-04-17T17:10:14.621506Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 25, wbId# [25:8388350642965737326:1634689637] 2025-04-17T17:10:14.621715Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 18, response# PDiskStateInfo { PDiskId: 18 CreateTime: 120441072 ChangeTime: 120441072 Path: "/18/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120539 2025-04-17T17:10:14.622376Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 23, response# PDiskStateInfo { PDiskId: 23 CreateTime: 120441072 ChangeTime: 120441072 Path: "/23/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120539 2025-04-17T17:10:14.622540Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 25 CreateTime: 120441072 ChangeTime: 120441072 Path: "/25/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120539 2025-04-17T17:10:14.622641Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 19, response# PDiskStateInfo { PDiskId: 19 CreateTime: 120441072 ChangeTime: 120441072 Path: "/19/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120539 2025-04-17T17:10:14.622698Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 20, response# PDiskStateInfo { PDiskId: 20 CreateTime: 120441072 ChangeTime: 120441072 Path: "/20/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120539 2025-04-17T17:10:14.622754Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 21, response# PDiskStateInfo { PDiskId: 21 CreateTime: 120441072 ChangeTime: 120441072 Path: "/21/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120539 2025-04-17T17:10:14.622820Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 22, response# PDiskStateInfo { PDiskId: 22 CreateTime: 120441072 ChangeTime: 120441072 Path: "/22/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120539 2025-04-17T17:10:14.622892Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 24, response# PDiskStateInfo { PDiskId: 24 CreateTime: 120441072 ChangeTime: 120441072 Path: "/24/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120539 2025-04-17T17:10:14.622953Z node 18 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-17T17:10:14.639163Z node 18 :CMS DEBUG: TTxStorePermissions 
complete
2025-04-17T17:10:14.639423Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "VDisks eviction from host 18 has not yet been completed" } RequestId: "user-r-3" Deadline: 0 }
2025-04-17T17:10:14.640070Z node 18 :CMS INFO: User user removes request user-r-3
2025-04-17T17:10:14.640146Z node 18 :CMS DEBUG: Resulting status: OK
2025-04-17T17:10:14.640223Z node 18 :CMS DEBUG: TTxRemoveRequest Execute
2025-04-17T17:10:14.640268Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 18
2025-04-17T17:10:14.640416Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-3, reason# explicit remove
2025-04-17T17:10:14.660802Z node 18 :CMS DEBUG: TTxRemoveRequest Complete
2025-04-17T17:10:14.661011Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: REJECT RequestId: "user-r-3" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } }
>> TCmsTest::ActionIssue [GOOD]
>> KeyValueReadStorage::ReadOk
>> KeyValueReadStorage::ReadOk [GOOD]
>> KeyValueReadStorage::ReadNotWholeBlobOk [GOOD]
>> KeyValueReadStorage::ReadOneItemError [GOOD]
>> TCmsTest::StateStorageLockedNodes [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::PriorityRange [GOOD]
Test command err:
2025-04-17T17:10:11.593361Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17
2025-04-17T17:10:11.593434Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18
2025-04-17T17:10:11.593458Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19
2025-04-17T17:10:11.593480Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20
2025-04-17T17:10:11.593501Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21
2025-04-17T17:10:11.593522Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22
2025-04-17T17:10:11.593543Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23
2025-04-17T17:10:11.593568Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24
2025-04-17T17:10:11.607453Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17
2025-04-17T17:10:11.607507Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18
2025-04-17T17:10:11.607530Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19
2025-04-17T17:10:11.607552Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20
2025-04-17T17:10:11.607573Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21
2025-04-17T17:10:11.607595Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22
2025-04-17T17:10:11.607615Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23
2025-04-17T17:10:11.608949Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24
2025-04-17T17:10:11.649373Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17
2025-04-17T17:10:11.649433Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18
2025-04-17T17:10:11.649454Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19
2025-04-17T17:10:11.649478Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20
2025-04-17T17:10:11.649498Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21
2025-04-17T17:10:11.649517Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22
2025-04-17T17:10:11.649535Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23
2025-04-17T17:10:11.649558Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24
>> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi
>> KeyValueReadStorage::ReadRangeOk1Key [GOOD]
>> KeyValueReadStorage::ReadRangeOk [GOOD]
>> KeyValueReadStorage::ReadRangeNoData [GOOD]
>> TKeyValueCollectorTest::TestKeyValueCollectorEmpty
>> TKeyValueTest::TestWriteReadPatchRead
>> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD]
>> TKeyValueCollectorTest::TestKeyValueCollectorEmpty [GOOD]
>> TKeyValueCollectorTest::TestKeyValueCollectorMany
|73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD]
>> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD]
>> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk
>> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk
>> TKeyValueTest::TestBasicWriteRead
|73.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql
|73.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql
|73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadOneItemError [GOOD]
Test command err:
2025-04-17T17:10:17.348123Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0
2025-04-17T17:10:17.358863Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0
2025-04-17T17:10:17.437954Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0
2025-04-17T17:10:17.438035Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0
2025-04-17T17:10:17.451663Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0
2025-04-17T17:10:17.451766Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV317@keyvalue_storage_read_request.cpp:310} Unexpected EvGetResult. KeyValue# 1 Status# OK Id# [1:2:3:2:0:1:0] ResponseStatus# ERROR Deadline# 586524-01-19T08:01:49.551615Z Now# 1970-01-01T00:00:00.000000Z SentAt# 1970-01-01T00:00:00.000000Z GotAt# 2025-04-17T17:10:17.451456Z ErrorReason#
>> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy [GOOD]
>> TCmsTenatsTest::TestClusterLimitForceRestartMode
>> TKeyValueTest::TestRenameWorks
>> TKeyValueTest::TestConcatWorks
>> TKeyValueCollectorTest::TestKeyValueCollectorMany [GOOD]
>> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD]
>> TKeyValueTest::TestWriteReadPatchRead [GOOD]
>> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadRangeNoData [GOOD]
Test command err:
2025-04-17T17:10:17.630145Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0
2025-04-17T17:10:17.632744Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0
2025-04-17T17:10:17.638835Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0
2025-04-17T17:10:17.638898Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0
2025-04-17T17:10:17.644931Z 1 00h00m00.000000s :KEYVALUE INFO: {KV320@keyvalue_storage_read_request.cpp:122} Inline read request KeyValue# 1 Status# OK
2025-04-17T17:10:17.644978Z 1 00h00m00.000000s :KEYVALUE DEBUG: {KV322@keyvalue_storage_read_request.cpp:134} Expected OK or UNKNOWN and given OK readCount# 0
2025-04-17T17:10:17.645037Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0
|73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ActionIssue [GOOD]
>> TKesusTest::TestPassesUpdatedPropsToSession [GOOD]
|73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageLockedNodes [GOOD]
>> TKeyValueTest::TestWrite200KDeleteThenResponseError
>> TCmsTest::WalleTasksWithNodeLimit [GOOD]
>> TCmsTest::WalleTasksDifferentPriorities
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140]
2025-04-17T17:10:16.513019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T17:10:16.513101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:10:16.513138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T17:10:16.513197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:10:16.513235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:10:16.513260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:10:16.513314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:10:16.513412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:10:16.513729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:10:16.645477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:10:16.645530Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:16.679748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:10:16.679869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:10:16.680016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:10:16.711814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:10:16.712856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:10:16.716761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:10:16.717507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:10:16.744598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:10:16.745995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:10:16.746201Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:10:16.746768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:10:16.747160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:10:16.747529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:10:16.755515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:10:16.827382Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:10:17.227054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:10:17.227913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, 
path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:17.228892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:10:17.230695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:10:17.230759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:17.245984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:10:17.247222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:10:17.249451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:17.249976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:10:17.250339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:10:17.250699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:10:17.265557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:17.265620Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:10:17.265813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:10:17.282416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:17.282646Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:17.283203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:10:17.283597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:10:17.308204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:10:17.316308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:10:17.317585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:10:17.322838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at 
schemeshard: 72057594046678944 2025-04-17T17:10:17.323430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:10:17.323965Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:10:17.325367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:10:17.325568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:10:17.326806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:10:17.327522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:10:17.333935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:10:17.334317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:10:17.335168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:10:17.335531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:10:17.340676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:17.340889Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:10:17.341800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:10:17.342165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:10:17.342367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:10:17.342747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:10:17.343288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:10:17.343685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:10:17.344038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:10:17.346108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:10:17.346233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:10:17.346281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:10:17.346324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:10:17.348310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:10:17.348439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:10:17.348684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... NFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T17:10:17.480494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T17:10:17.480536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 100 2025-04-17T17:10:17.480564Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 1 2025-04-17T17:10:17.480589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:10:17.480642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 0/1, is published: true 2025-04-17T17:10:17.493709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-17T17:10:17.493799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateRTMR TConfigureParts ProgressState operationId# 100:0 at tablet72057594046678944 2025-04-17T17:10:17.493840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 3 -> 128 2025-04-17T17:10:17.497529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-17T17:10:17.497991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-17T17:10:17.506599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-17T17:10:17.506800Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:10:17.506991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-04-17T17:10:17.507101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:10:17.513419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-17T17:10:17.513961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-17T17:10:17.514920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:10:17.515027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:10:17.515081Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046678944 2025-04-17T17:10:17.515171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-17T17:10:17.515324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:10:17.515379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 100 2025-04-17T17:10:17.517577Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:10:17.517614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:10:17.517756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:10:17.517843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:10:17.517888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-17T17:10:17.517920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-17T17:10:17.518252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-17T17:10:17.518301Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-17T17:10:17.518404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-17T17:10:17.518448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T17:10:17.518487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-17T17:10:17.518515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T17:10:17.518549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-17T17:10:17.518581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T17:10:17.518614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-17T17:10:17.518641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-17T17:10:17.518707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:10:17.518750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 
0 2025-04-17T17:10:17.518779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-17T17:10:17.518806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-17T17:10:17.519345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T17:10:17.519444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T17:10:17.519478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-17T17:10:17.519514Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T17:10:17.519549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:10:17.520237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T17:10:17.520301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T17:10:17.520322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-17T17:10:17.520368Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-17T17:10:17.520407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:10:17.520473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-17T17:10:17.523010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-17T17:10:17.523816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-17T17:10:17.524067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-17T17:10:17.524123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-17T17:10:17.524539Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-17T17:10:17.524624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-17T17:10:17.524658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:312:2303] TestWaitNotification: OK eventTxId 100 
2025-04-17T17:10:17.525060Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/rtmr1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-17T17:10:17.525217Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/rtmr1" took 176us result status StatusSuccess
2025-04-17T17:10:17.525506Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/rtmr1" PathDescription { Self { Name: "rtmr1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } RtmrVolumeDescription { Name: "rtmr1" PathId: 2 PartitionsCount: 0 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TKeyValueTest::TestCleanUpDataOnEmptyTablet
>> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk
>> TKeyValueTest::TestCopyRangeWorks
|73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD]
Test command err:
2025-04-17T17:10:18.047208Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0
2025-04-17T17:10:18.049298Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD]
Test command err:
2025-04-17T17:10:05.970168Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-17T17:10:05.970265Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-17T17:10:05.984796Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-17T17:10:05.985040Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-17T17:10:06.012650Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-17T17:10:06.017652Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:132:2158], cookie=13037265314179651030, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 })
2025-04-17T17:10:06.017954Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter
resource 1 "Root" 2025-04-17T17:10:06.029404Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:132:2158], cookie=13037265314179651030) 2025-04-17T17:10:06.029745Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:142:2166], cookie=2004395417346830576, path="/Root/Res", config={ }) 2025-04-17T17:10:06.029880Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-17T17:10:06.041313Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:142:2166], cookie=2004395417346830576) 2025-04-17T17:10:06.042614Z node 1 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [1:147:2171]. Cookie: 15851520868646803896. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-17T17:10:06.042668Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[1:147:2171], cookie=15851520868646803896) 2025-04-17T17:10:06.043143Z node 1 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [1:147:2171]. Cookie: 7432993851436254551. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2025-04-17T17:10:06.043184Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[1:147:2171], cookie=7432993851436254551) 2025-04-17T17:10:08.991285Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:08.991966Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:09.035609Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:09.036022Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:09.090373Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:09.091718Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:132:2158], cookie=3754586890594222682, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-17T17:10:09.093085Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:10:09.110462Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:132:2158], cookie=3754586890594222682) 2025-04-17T17:10:09.112871Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:142:2166]. Cookie: 8510802820207617224. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-17T17:10:09.113237Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:142:2166], cookie=8510802820207617224) 2025-04-17T17:10:09.115072Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:142:2166]. Cookie: 1718340435883906112. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-17T17:10:09.115750Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:142:2166], cookie=1718340435883906112) 2025-04-17T17:10:09.117940Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:142:2166]. Cookie: 8028403766446483272. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-04-17T17:10:09.118129Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:142:2166], cookie=8028403766446483272) 2025-04-17T17:10:09.120085Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:142:2166]. Cookie: 11956022732416830806. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-04-17T17:10:09.120298Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:142:2166], cookie=11956022732416830806) 2025-04-17T17:10:11.753605Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:11.753823Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:11.781740Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:11.781986Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:11.813587Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:11.814686Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:130:2156], cookie=4221574131066696372, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-17T17:10:11.816750Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:10:11.841646Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:130:2156], cookie=4221574131066696372) 2025-04-17T17:10:11.842123Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:140:2164], cookie=16995244689739422218, path="/Root/Res1", config={ }) 2025-04-17T17:10:11.842301Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res1" 2025-04-17T17:10:11.855477Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:140:2164], cookie=16995244689739422218) 2025-04-17T17:10:11.855949Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:145:2169], cookie=9891117499169835094, path="/Root/Res2", config={ }) 2025-04-17T17:10:11.856151Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Res2" 2025-04-17T17:10:11.875626Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:145:2169], cookie=9891117499169835094) 2025-04-17T17:10:11.876479Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:150:2174]. Cookie: 13416830773504212896. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-17T17:10:11.876532Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:150:2174], cookie=13416830773504212896) 2025-04-17T17:10:11.877059Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:150:2174]. Cookie: 13377229243416102917. 
Data: { Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "Root/Res2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-17T17:10:11.877104Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:150:2174], cookie=13377229243416102917) 2025-04-17T17:10:11.877564Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [3:150:2174]. Cookie: 1463495896238288067. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1019500 } ResourcesInfo { ResourceId: 3 AcceptedUs: 1019500 } } 2025-04-17T17:10:11.877607Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[3:150:2174], cookie=1463495896238288067) 2025-04-17T17:10:14.717254Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:14.717362Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:14.736756Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:14.736999Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:14.757916Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:14.758247Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=3730483948984109318, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2025-04-17T17:10:14.758507Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:10:14.785530Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], cookie=3730483948984109318) 2025-04-17T17:10:14.789644Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:140:2164]. Cookie: 8964188012605447550. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { Enabled: true BillingPeriodSec: 2 Labels { key: "k1" value: "v1" } Labels { key: "k2" value: "v2" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:10:14.789865Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:140:2164], cookie=8964188012605447550) 2025-04-17T17:10:14.791644Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:140:2164]. Cookie: 7751525988203858255. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 26500 } } 2025-04-17T17:10:14.800697Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:140:2164], cookie=7751525988203858255) 2025-04-17T17:10:17.806700Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:17.806806Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:17.828595Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:17.828711Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:17.843186Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:17.843572Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:130:2156], cookie=14572608811454033870, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2025-04-17T17:10:17.843783Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:10:17.869097Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:130:2156], cookie=14572608811454033870) 2025-04-17T17:10:17.869784Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:139:2163], cookie=177458694863742921, path="/Root/Res", config={ }) 2025-04-17T17:10:17.870200Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-17T17:10:17.884010Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:139:2163], cookie=177458694863742921) 2025-04-17T17:10:17.884892Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:144:2168]. Cookie: 17377417667170853569. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:10:17.884952Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:144:2168], cookie=17377417667170853569) 2025-04-17T17:10:17.885462Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[5:148:2172], cookie=2659536462263936577, id=0, path="/Root", config={ MaxUnitsPerSecond: 150 }) 2025-04-17T17:10:17.885623Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Updated quoter resource 1 "Root" 2025-04-17T17:10:17.885808Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:144:2168]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 150 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } StateNotification { Status: SUCCESS } } } 2025-04-17T17:10:17.899483Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[5:148:2172], cookie=2659536462263936577) 2025-04-17T17:10:17.900085Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:144:2168]. Cookie: 13549389697652979417. Data: { } 2025-04-17T17:10:17.900151Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Update quoter resources consumption state (sender=[5:144:2168], cookie=13549389697652979417) >> TKeyValueTest::TestRewriteThenLastValue >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk >> TCmsTest::TestLogOperationsRollback [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk |73.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} >> TCmsTest::Mirror3dcPermissions [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorSingle >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueCollectorTest::TestKeyValueCollectorSingle [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::Mirror3dcPermissions [GOOD] |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestLogOperationsRollback [GOOD] |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestDisconnectWhileAttaching |74.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |74.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... 
results_accumulator.log} |74.0%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb >> TTxDataShardMiniKQL::Write >> TTxDataShardMiniKQL::ReadConstant |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] >> TTxDataShardMiniKQL::CrossShard_5_AllToAll >> TTxDataShardMiniKQL::CrossShard_1_Cycle >> TTxDataShardMiniKQL::WriteEraseRead |74.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |74.0%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] >> TCmsTest::WalleTasksDifferentPriorities [GOOD] |74.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |74.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] |74.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |74.0%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut >> TProxyActorTest::TestDisconnectWhileAttaching [GOOD] |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleTasksDifferentPriorities [GOOD] >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TTxDataShardMiniKQL::ReadConstant [GOOD] >> TTxDataShardMiniKQL::ReadAfterWrite >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi [GOOD] >> TKeyValueTest::TestLargeWriteAndDelete >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] >> TTxDataShardMiniKQL::ReadSpecialColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestDisconnectWhileAttaching [GOOD] Test command err: ... waiting for blocked registrations ... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR ... waiting for blocked registrations (done) 2025-04-17T17:10:20.453007Z node 1 :PIPE_SERVER ERROR: [72057594037927937] NodeDisconnected NodeId# 2 ... 
unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] Test command err: 2025-04-17T17:10:03.874503Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:03.875282Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:03.955360Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:03.956600Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:04.007552Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:04.029945Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:132:2158], cookie=3540135272506299911, path="/Res", config={ MaxUnitsPerSecond: -100 }) 2025-04-17T17:10:04.030791Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:132:2158], cookie=3540135272506299911) 2025-04-17T17:10:04.033722Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:139:2163], cookie=5998135122786201902, path="/ResWithoutMaxUnitsPerSecond", config={ }) 2025-04-17T17:10:04.034588Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:139:2163], cookie=5998135122786201902) 2025-04-17T17:10:04.036694Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:142:2166], cookie=17814658043582989405, path="/ResWithMaxUnitsPerSecond", config={ MaxUnitsPerSecond: 1 }) 2025-04-17T17:10:04.038613Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "ResWithMaxUnitsPerSecond" 2025-04-17T17:10:04.059420Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:142:2166], cookie=17814658043582989405) 2025-04-17T17:10:04.062720Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:147:2171], cookie=17034237721113777496, path="/ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond", config={ }) 2025-04-17T17:10:04.063797Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond" 2025-04-17T17:10:04.079419Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:147:2171], cookie=17034237721113777496) 2025-04-17T17:10:05.478650Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:05.478743Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:05.498553Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:05.498651Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:05.522750Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:05.523086Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:132:2158], cookie=14985040383924465903, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2025-04-17T17:10:05.523412Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:10:05.535142Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:132:2158], cookie=14985040383924465903) 2025-04-17T17:10:05.535739Z node 2 
:KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:142:2166], cookie=2237395348308883269, path="/Root/Res", config={ }) 2025-04-17T17:10:05.535956Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-17T17:10:05.547850Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:142:2166], cookie=2237395348308883269) 2025-04-17T17:10:05.549779Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:147:2171]. Cookie: 18070130054048424638. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 2 Version: "version" Schema: "schema" CloudId: "cloud" FolderId: "folder" ResourceId: "resource" SourceId: "source" Tags { key: "key" value: "value" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:10:05.549858Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:147:2171], cookie=18070130054048424638) 2025-04-17T17:10:05.550390Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:147:2171]. Cookie: 10827192485872359345. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2025-04-17T17:10:05.550435Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:147:2171], cookie=10827192485872359345) 2025-04-17T17:10:08.463436Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:08.463539Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:08.485713Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:08.485936Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:08.504680Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:08.505024Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:130:2156], cookie=13610551550571312328, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-17T17:10:08.505274Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:10:08.532759Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:130:2156], cookie=13610551550571312328) 2025-04-17T17:10:08.534448Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:140:2164], cookie=4130844790497801707, path="/Root/Res", config={ }) 2025-04-17T17:10:08.535695Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-17T17:10:08.565403Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:140:2164], cookie=4130844790497801707) 2025-04-17T17:10:08.568080Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:145:2169]. Cookie: 8914888836088697040. 
Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-17T17:10:08.568265Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:145:2169], cookie=8914888836088697040) 2025-04-17T17:10:08.572573Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [3:145:2169]. Cookie: 12716606233441784068. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1018000 } } 2025-04-17T17:10:08.572624Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[3:145:2169], cookie=12716606233441784068) 2025-04-17T17:10:11.173580Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:11.173667Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:11.193443Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:11.193559Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:11.213604Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:11.213958Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=3273309904216366207, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-17T17:10:11.214211Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:10:11.237161Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], cookie=3273309904216366207) 2025-04-17T17:10:11.237889Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:140:2164]. Cookie: 2179271224654667393. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-17T17:10:11.237941Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:140:2164], cookie=2179271224654667393) 2025-04-17T17:10:11.238340Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:140:2164]. Cookie: 3766892107919902793. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1016500 } } 2025-04-17T17:10:11.238397Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:140:2164], cookie=3766892107919902793) 2025-04-17T17:10:11.238732Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:140:2164]. Cookie: 9983737876196451988. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1016500 } } 2025-04-17T17:10:11.238769Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:140:2164], cookie=9983737876196451988) 2025-04-17T17:10:14.402308Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:14.402423Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:14.425408Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:14.428567Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:14.468442Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:14.472552Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=559129059119801292, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-17T17:10:14.473626Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:10:14.486297Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=559129059119801292) 2025-04-17T17:10:14.488044Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:142:2166]. Cookie: 10198563815469937749. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-17T17:10:14.488215Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:142:2166], cookie=10198563815469937749) 2025-04-17T17:10:14.492967Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:142:2166]. Cookie: 4144790622956853975. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 3000000 } } 2025-04-17T17:10:14.493077Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:142:2166], cookie=4144790622956853975) 2025-04-17T17:10:17.177609Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:173:2190]. Cookie: 8471418328225258330. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-17T17:10:17.177677Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:173:2190], cookie=8471418328225258330) 2025-04-17T17:10:17.179322Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:173:2190]. Cookie: 13486109304454049837. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 9000000 } } 2025-04-17T17:10:17.179366Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:173:2190], cookie=13486109304454049837) 2025-04-17T17:10:19.367729Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:200:2216]. Cookie: 2851310789099332633. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-17T17:10:19.367791Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:200:2216], cookie=2851310789099332633) 2025-04-17T17:10:19.368292Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:200:2216]. Cookie: 7378005176884527124. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 15000000 } } 2025-04-17T17:10:19.368344Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:200:2216], cookie=7378005176884527124) >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] >> TTxDataShardMiniKQL::WriteEraseRead [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMultipleShards |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TTxDataShardMiniKQL::Write [GOOD] >> TTxDataShardMiniKQL::TableStats >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] >> TTxDataShardMiniKQL::WriteKeyTooLarge |74.0%| [TA] $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] Test command err: 2025-04-17T17:10:18.980761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:10:18.981111Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:10:18.981172Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/000ee9/r3tmp/tmpBVUhPe/pdisk_1.dat 2025-04-17T17:10:19.269789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:10:19.310499Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:19.349260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:10:19.349435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:10:19.361038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:10:19.441544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:78:2110] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:82:2057] recipient: [4:78:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:81:2111] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:76:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:78:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:79:2110] Leader for TabletID 72057594037927937 is [5:81:2111] sender: [5:82:2057] recipient: [5:79:2110] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:81:2111] Leader for TabletID 72057594037927937 is [5:81:2111] sender: [5:135:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:77:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:81:2057] recipient: [6:79:2110] Leader for TabletID 72057594037927937 is [6:82:2111] sender: [6:83:2057] recipient: [6:79:2110] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
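The TKeyValueTest reboot cycles above follow a fixed fault-injection pattern: the harness replays a scenario against tablet 72057594037927937, kills the leader the moment a chosen event type (for example NKikimr::TEvKeyValue::TEvIntermediate) is observed, and then verifies that a new leader generation picks up in its place. Below is a minimal, illustrative sketch of that restart-on-event loop; TFakeTablet and the scenario strings are invented stand-ins, not the real NKikimr test fixtures.

    // Illustrative-only model of the "!Reboot ... on event <type> !" pattern.
    #include <iostream>
    #include <string>
    #include <vector>

    struct TFakeTablet {
        int Generation = 1;              // bumped on every reboot, like [4:56:...] -> [4:81:...]
        void Reboot() { ++Generation; }
    };

    // Replays events; reboots the tablet when rebootOn is seen, mirroring
    // "!Reboot ... on event <type> !" followed by "rebooted!" in the log.
    int RunScenarioWithReboot(const std::vector<std::string>& events,
                              const std::string& rebootOn,
                              TFakeTablet& tablet) {
        for (const std::string& ev : events) {
            if (ev == rebootOn) {
                std::cout << "!Reboot on event " << ev << "!\n";
                tablet.Reboot();         // a new actor id appears after this point
            }
            // ... deliver ev to the (possibly new) leader and re-check state ...
        }
        return tablet.Generation;
    }

    int main() {
        TFakeTablet tablet;
        int gen = RunScenarioWithReboot(
            {"TEvWrite", "TEvIntermediate", "TEvRead"}, "TEvIntermediate", tablet);
        std::cout << "new actor generation is " << gen << "\n"; // expect 2
    }

Each "new actor is[...]" line in the log is the observable effect of the generation bump modeled here: the tablet resolver is refreshed and subsequent requests land on the new leader actor.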
new actor is[6:82:2111] Leader for TabletID 72057594037927937 is [6:82:2111] sender: [6:136:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] >> TTxDataShardMiniKQL::ReadAfterWrite [GOOD] >> TTxDataShardMiniKQL::ReadNonExisting ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] Test command err: 2025-04-17T17:10:18.411495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:10:18.412030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:10:18.412100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/000f0b/r3tmp/tmpvRTLFT/pdisk_1.dat 2025-04-17T17:10:18.746648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:10:18.783718Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:18.821555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:10:18.821706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:10:18.833177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:10:18.914164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:10:21.240966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:21.241423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:945:2781], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:21.244064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:21.278315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:10:21.315865Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-17T17:10:21.618574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:948:2784], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:10:21.892202Z node 1 :TX_PROXY ERROR: Actor# [1:1017:2833] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:10:23.325098Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2bns9a2ae6se09p2xktcyg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzM0MDY1ZS00YjI3NTRiZC04ZTFkNGI4Yi1iMzJkYmNiMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (LiteralExecuter) , (DataExecuter -> [(WaitForTableResolve) , (RunTasks) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)]) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)])])]) >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] >> TProxyActorTest::TestCreateSemaphoreInterrupted >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx >> TDataShardTrace::TestTraceDistributedSelect [GOOD] >> TTxDataShardMiniKQL::ReadSpecialColumns [GOOD] >> TTxDataShardMiniKQL::SelectRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] Test command err: 2025-04-17T17:10:19.441986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:10:19.442345Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:10:19.442402Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/000edf/r3tmp/tmpIJ4WO0/pdisk_1.dat 2025-04-17T17:10:19.791750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:10:19.838569Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:19.882108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:10:19.882267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:10:19.894623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:10:19.981255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:10:22.792310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:22.792764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:945:2781], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:22.795654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:22.822673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:10:22.980840Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-17T17:10:23.370149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:948:2784], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:10:23.860278Z node 1 :TX_PROXY ERROR: Actor# [1:1017:2833] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:10:25.460863Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2bntrf1xhk2rd6skvpzbg7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmFkMjFjM2YtNThhMjQ1OGItNzBhYTY5YzEtMTU5YjYyM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (DataExecuter -> [(WaitForTableResolve) , (ComputeActor -> [(ForwardWriteActor)]) , (RunTasks) , (Commit -> [(Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)]) , (Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)])])])]) >> TestProtocols::TestResolveProtocol ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] Test command err: 2025-04-17T17:10:18.801751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:10:18.802316Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:10:18.802385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/000ef7/r3tmp/tmpbup0vj/pdisk_1.dat 2025-04-17T17:10:19.169858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:10:19.206778Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:19.246244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:10:19.246414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:10:19.258012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:10:19.337185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:10:22.194323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:22.194789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:945:2781], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:22.197594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:22.227078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:10:22.319361Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-17T17:10:22.537250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:948:2784], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:10:22.913188Z node 1 :TX_PROXY ERROR: Actor# [1:1017:2833] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:10:24.429938Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2bnt73bqxcyqejtwvmmgk6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjEyZDVhZGUtNTk3Mzg4NmEtMzAyNjBlNDEtMjA1NTAxODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:10:24.584198Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2bnwed51m2r89zndnr4h36, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDdlZjIyZWEtY2Q5YWZkODktNmEyNzVjYmUtNjgyYTIzZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:10:25.256552Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2bnwmaapw7h0zvmpkbar29, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWI2YzhkZGUtOTViYzQ1MmUtNGYxNDFlMGEtODdhOTY4MjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] >> TTxDataShardMiniKQL::WriteKeyTooLarge [GOOD] >> TTxDataShardMiniKQL::WriteValueTooLarge >> TTxDataShardMiniKQL::WriteAndReadMultipleShards [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMany >> UpsertLoad::ShouldCreateTable >> TestProtocols::TestConnectProtocol |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] >> KqpStreamLookup::ReadTableWithIndexDuringSplit >> TTxDataShardMiniKQL::TableStats [GOOD] >> TTxDataShardMiniKQL::TableStatsHistograms >> TInterconnectTest::TestBlobEvent220BytesPreSerialized |74.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom |74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |74.1%| [TA] {RESULT} $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |74.1%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation >> TInterconnectTest::TestManyEvents >> TestProtocols::TestResolveProtocol [GOOD] >> TestProtocols::TestHTTPCollectedVerySlow >> TInterconnectTest::OldFormat >> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelect [GOOD] Test command err: 2025-04-17T17:10:19.380401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:10:19.380983Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:10:19.381069Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/000ede/r3tmp/tmp3o08pX/pdisk_1.dat 2025-04-17T17:10:19.859322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:10:19.897836Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:19.936165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:10:19.936323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:10:19.947710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:10:20.039023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:10:23.443531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:23.444000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:945:2781], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:23.446480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:23.465291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:10:23.591081Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-17T17:10:23.922198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:948:2784], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-04-17T17:10:24.289694Z node 1 :TX_PROXY ERROR: Actor# [1:1017:2833] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T17:10:25.442759Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2bnve76h42gdvkcb01f72r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzIzNWQ2NjYtZmIwNDgzNy02OWM2YjVkYy01MTk5Njc2Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-17T17:10:25.671319Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2bnxe352p3wpc9j38eekmr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2ViMmFjODQtYWZjZWVjYmItMjQ2ZTM3MWUtYzQyYzlhNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-17T17:10:26.626076Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2bny6s1a3czyak68jgcpf4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2NmM2MyZGEtY2ZiYTJjNTQtZGVhZjRhZjctYzI2ODIyMzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::ReadNonExisting [GOOD]
Test command err:
2025-04-17T17:10:20.784497Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot
2025-04-17T17:10:20.797219Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored
2025-04-17T17:10:20.797598Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151]
2025-04-17T17:10:20.797787Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-17T17:10:20.841802Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-04-17T17:10:20.846189Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-17T17:10:20.996639Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-17T17:10:21.017321Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184
2025-04-17T17:10:21.017725Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184
2025-04-17T17:10:21.018147Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184
2025-04-17T17:10:21.020609Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-17T17:10:21.022397Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-17T17:10:21.022790Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2
2025-04-17T17:10:21.024079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T17:10:21.024495Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:10:21.281650Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-17T17:10:21.411386Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184
2025-04-17T17:10:21.412953Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-17T17:10:21.413632Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213]
2025-04-17T17:10:21.414245Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184
2025-04-17T17:10:21.414640Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme
2025-04-17T17:10:21.414856Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-17T17:10:21.416357Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-17T17:10:21.416746Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-17T17:10:21.418958Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184
2025-04-17T17:10:21.419398Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184
2025-04-17T17:10:21.420295Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184
2025-04-17T17:10:21.420517Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0
2025-04-17T17:10:21.420923Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184
2025-04-17T17:10:21.421366Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations
2025-04-17T17:10:21.421766Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184
2025-04-17T17:10:21.422193Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0
2025-04-17T17:10:21.423134Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-04-17T17:10:21.423647Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-17T17:10:21.423693Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-17T17:10:21.423750Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0]
2025-04-17T17:10:21.435967Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { }
2025-04-17T17:10:21.436016Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-04-17T17:10:21.436101Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-04-17T17:10:21.437109Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx
2025-04-17T17:10:21.437307Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0
2025-04-17T17:10:21.440021Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184
2025-04-17T17:10:21.440588Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts
2025-04-17T17:10:21.440783Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx
2025-04-17T17:10:21.441160Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx
2025-04-17T17:10:21.441358Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx
2025-04-17T17:10:21.444945Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts
2025-04-17T17:10:21.444986Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx
2025-04-17T17:10:21.445193Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose
2025-04-17T17:10:21.445391Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose
2025-04-17T17:10:21.445428Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete
2025-04-17T17:10:21.446003Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose
2025-04-17T17:10:21.446235Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan
2025-04-17T17:10:21.446439Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan
2025-04-17T17:10:21.446638Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan
2025-04-17T17:10:21.475703Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184
2025-04-17T17:10:21.476352Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx
2025-04-17T17:10:21.476599Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose
2025-04-17T17:10:21.476640Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED
2025-04-17T17:10:21.477717Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme
2025-04-17T17:10:21.481810Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-17T17:10:21.482249Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-17T17:10:21.482662Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0]
2025-04-17T17:10:21.483722Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184}
2025-04-17T17:10:21.483757Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep
2025-04-17T17:10:21.484508Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan
2025-04-17T17:10:21.484908Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed
2025-04-17T17:10:21.485545Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan
2025-04-17T17:10:21.485775Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue
2025-04-17T17:10:21.507873Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 }
2025-04-17T17:10:21.508348Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-17T17:10:21.509542Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-17T17:10:21.509587Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-17T17:10:21.510233Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184
2025-04-17T17:10:21.510463Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1
2025-04-17T17:10:21.510667Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184
2025-04-17T17:10:21.510894Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184
2025-04-17T17:10:21.511134Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue
2025-04-17T17:10:21.511877Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed
2025-04-17T17:10:21.512239Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue
2025-04-17T17:10:21.512828Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails
2025-04-17T17:10:21.513055Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails
2025-04-17T17:10:21.514546Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0
2025-04-17T17:10:21.514589Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed
2025-04-17T17:10:21.514614Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails
2025-04-17T17:10:21.514636Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes
2025-04-17T17:10:21.514655Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes
2025-04-17T17:10:21.514954Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts
2025-04-17T17:10:21.514984Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes
2025-04-17T17:10:21.515440Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies
2025-04-17T17:10:21.515858Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies
2025-04-17T17:10:21.516101Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184
2025-04-17T17:10:21.516529Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184
2025-04-17T17:10:21.516753Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184
2025-04-17T17:10:21.517363Z node 1 :TX_DATA
...
tateInit, received event# 268828673, Sender [3:229:2224], Recipient [3:232:2225]: NKikimr::TEvTablet::TEvRestored
2025-04-17T17:10:26.880149Z node 3 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [3:229:2224], Recipient [3:232:2225]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-04-17T17:10:26.884404Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [3:232:2225]
2025-04-17T17:10:26.884567Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-17T17:10:26.888133Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute Persist Sys_SubDomainInfo
2025-04-17T17:10:26.913089Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-17T17:10:26.913174Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-17T17:10:26.914695Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184
2025-04-17T17:10:26.914753Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184
2025-04-17T17:10:26.914792Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184
2025-04-17T17:10:26.915087Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-17T17:10:26.915198Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-17T17:10:26.915238Z node 3 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [3:275:2225] in generation 3
2025-04-17T17:10:26.947136Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-17T17:10:26.947227Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 9437184
2025-04-17T17:10:26.947296Z node 3 :TX_DATASHARD INFO: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0
2025-04-17T17:10:26.947389Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 9437184 mediators count is 0 coordinators count is 1 buckets per mediator 2
2025-04-17T17:10:26.947578Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [3:280:2264]
2025-04-17T17:10:26.947648Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184
2025-04-17T17:10:26.947690Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 9437184
2025-04-17T17:10:26.947734Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-17T17:10:26.947929Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute
2025-04-17T17:10:26.948008Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete
2025-04-17T17:10:26.948096Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [3:232:2225], Recipient [3:232:2225]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-17T17:10:26.948159Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-17T17:10:26.948428Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184
2025-04-17T17:10:26.948534Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184
2025-04-17T17:10:26.948649Z node 3 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 232 RawX2: 12884904113 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3
2025-04-17T17:10:26.948726Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [3:24:2071], Recipient [3:232:2225]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0}
2025-04-17T17:10:26.948757Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult
2025-04-17T17:10:26.948788Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0
2025-04-17T17:10:26.948818Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-17T17:10:26.948855Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184
2025-04-17T17:10:26.948891Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0
2025-04-17T17:10:26.948920Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184
2025-04-17T17:10:26.948948Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations
2025-04-17T17:10:26.948977Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184
2025-04-17T17:10:26.949004Z node 3 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0
2025-04-17T17:10:26.949035Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-04-17T17:10:26.949108Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [3:24:2071], Recipient [3:232:2225]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 }
2025-04-17T17:10:26.949133Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult
2025-04-17T17:10:26.949163Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0
2025-04-17T17:10:26.949246Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [3:278:2262], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [3:282:2266] Leader: 1 Dead: 0 Generation: 2 VersionInfo: }
2025-04-17T17:10:26.949267Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected
2025-04-17T17:10:26.949330Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [3:123:2149], Recipient [3:232:2225]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1
2025-04-17T17:10:26.949354Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult
2025-04-17T17:10:26.949384Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready
2025-04-17T17:10:26.949429Z node 3 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184
2025-04-17T17:10:26.961322Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [3:278:2262], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [3:278:2262] ServerId: [3:282:2266] }
2025-04-17T17:10:26.961370Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed
2025-04-17T17:10:27.005507Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 }
2025-04-17T17:10:27.005577Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState
2025-04-17T17:10:27.005767Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:288:2270], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-17T17:10:27.005794Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-17T17:10:27.005828Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:286:2269], serverId# [3:288:2270], sessionId# [0:0:0]
2025-04-17T17:10:27.005980Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\365\001\037\004\0021\nvalue\005\205\n\205\002\207\205\002\207\203\001H\006\002\205\004\205\002?\006\002\205\000\034MyReads MyWrites\205\004\205\002?\006\002\206\202\024Reply\024Write?\014\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\010)\211\n?\006\203\005\004\200\205\002\203\004\006\213\002\203\004\203\004$SelectRow\000\003?\036 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000?\004\005?\"\003? p\001\013?&\003?$T\001\003?(\000\037\002\000\005?\016\005?\n?8\000\005?\014\003\005?\024\005?\020?8\000\006\000?\022\003?>\005?\032\006\000?\030\001\037/ \0018\001" TxId: 2 ExecLevel: 0 Flags: 0
2025-04-17T17:10:27.006002Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-04-17T17:10:27.006073Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-04-17T17:10:27.006569Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CheckDataTx
2025-04-17T17:10:27.006632Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed
2025-04-17T17:10:27.006660Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx
2025-04-17T17:10:27.006687Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies
2025-04-17T17:10:27.006711Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies
2025-04-17T17:10:27.006741Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min}
2025-04-17T17:10:27.006780Z node 3 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184
2025-04-17T17:10:27.006809Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed
2025-04-17T17:10:27.006825Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies
2025-04-17T17:10:27.006842Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx
2025-04-17T17:10:27.006856Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx
2025-04-17T17:10:27.007130Z node 3 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE
2025-04-17T17:10:27.007176Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0}
2025-04-17T17:10:27.007219Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed
2025-04-17T17:10:27.007239Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx
2025-04-17T17:10:27.007256Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose
2025-04-17T17:10:27.007275Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose
2025-04-17T17:10:27.007304Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE
2025-04-17T17:10:27.007357Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayComplete
2025-04-17T17:10:27.007377Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose
2025-04-17T17:10:27.007403Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations
2025-04-17T17:10:27.007430Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations
2025-04-17T17:10:27.007457Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed
2025-04-17T17:10:27.007471Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations
2025-04-17T17:10:27.007487Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished
2025-04-17T17:10:27.007530Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184
2025-04-17T17:10:27.007556Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose
2025-04-17T17:10:27.007586Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
|74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD]
>> TestProtocols::TestConnectProtocol [GOOD]
>> TestProtocols::TestHTTPCollected
>> TInterconnectTest::TestBlobEvent220BytesPreSerialized [GOOD]
>> TInterconnectTest::TestBlobEventDifferentSizes
>> TTxDataShardMiniKQL::SelectRange [GOOD]
>> TTxDataShardMiniKQL::SelectRangeWithNotFullKey
>> TInterconnectTest::OldFormat [GOOD]
>> TInterconnectTest::OldFormatSuppressVersionCheckOnNew
|74.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut
|74.1%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut
|74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut
|74.1%| [TA] $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log}
>> TInterconnectTest::TestNotifyUndelivered
>> TestProtocols::TestHTTPCollected [GOOD]
>> TInterconnectTest::TestTraceIdPassThrough
>> TInterconnectTest::TestBlobEventDifferentSizes [GOOD]
>> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized
>> TInterconnectTest::OldFormatSuppressVersionCheckOnNew [GOOD]
>> TInterconnectTest::OldFormatSuppressVersionCheckOnOld
>> TInterconnectTest::TestNotifyUndelivered [GOOD]
>> TInterconnectTest::TestNotifyUndeliveredOnMissedActor
>> TInterconnectTest::TestBlobEvent
|74.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut
|74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut
|74.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log}
>> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD]
|74.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut
|74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestTraceIdPassThrough [GOOD]
>> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized [GOOD]
>> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw
>> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx [GOOD]
>> TTxDataShardMiniKQL::MemoryUsageMultiShard
>> TInterconnectTest::OldFormatSuppressVersionCheckOnOld [GOOD]
>> TInterconnectTest::OldFormatSuppressVersionCheck
>> TInterconnectTest::TestNotifyUndeliveredOnMissedActor [GOOD]
>> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes
>> TestProtocols::TestHTTPCollectedVerySlow [GOOD]
>> TInterconnectTest::TestBlobEvent [GOOD]
>> TInterconnectTest::TestBlobEvent220Bytes
>> TestProtocols::TestHTTPRequest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD]
Test command err:
2025-04-17T17:10:26.317931Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot
2025-04-17T17:10:26.343849Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored
2025-04-17T17:10:26.345503Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151]
2025-04-17T17:10:26.346270Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-17T17:10:26.439933Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-04-17T17:10:26.445146Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-17T17:10:26.576027Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-17T17:10:26.581315Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184
2025-04-17T17:10:26.581523Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184
2025-04-17T17:10:26.581675Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184
2025-04-17T17:10:26.582549Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-17T17:10:26.583104Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-17T17:10:26.583324Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2
2025-04-17T17:10:26.583876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T17:10:26.583973Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:10:26.650173Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-17T17:10:26.669957Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184
2025-04-17T17:10:26.670105Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-17T17:10:26.670172Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213]
2025-04-17T17:10:26.670200Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184
2025-04-17T17:10:26.670229Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme
2025-04-17T17:10:26.670254Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:10:26.670373Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:26.670418Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:26.670599Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T17:10:26.670660Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T17:10:26.670751Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:10:26.670783Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:10:26.670832Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T17:10:26.670873Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T17:10:26.670900Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T17:10:26.670923Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T17:10:26.670948Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:10:26.671015Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:26.671048Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:26.671085Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T17:10:26.672701Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nx\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\016\n\010__tablet\030\004 9\032\023\n\r__updateEpoch\030\004 :\032\020\n\n__updateNo\030\004 ;(\"J\014/Root/table1\222\002\013\th\020\000\000\000\000\000\000\020\r" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T17:10:26.672745Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:10:26.672800Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:10:26.672902Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T17:10:26.672940Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T17:10:26.672980Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T17:10:26.673020Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:10:26.673045Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T17:10:26.673078Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T17:10:26.673101Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:10:26.673293Z node 1 :TX_DATASHARD TRACE: Execution status for 
[0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T17:10:26.673312Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T17:10:26.673335Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T17:10:26.673357Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:10:26.673386Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T17:10:26.673410Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T17:10:26.673434Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T17:10:26.673463Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T17:10:26.673483Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T17:10:26.685078Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T17:10:26.685130Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:10:26.685153Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:10:26.685183Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T17:10:26.685233Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T17:10:26.685600Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:26.685631Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:26.685668Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T17:10:26.685714Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T17:10:26.685735Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:10:26.685825Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T17:10:26.685852Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:10:26.685892Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T17:10:26.685925Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T17:10:26.688128Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T17:10:26.688196Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:10:26.688341Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:26.688377Z node 1 :TX_DATASHARD TRACE: StateWork, processing event 
TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:26.688418Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:10:26.688442Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:10:26.688463Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T17:10:26.688494Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T17:10:26.688519Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T17:10:26.688549Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:10:26.688576Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T17:10:26.688611Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T17:10:26.688642Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T17:10:26.688753Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T17:10:26.688786Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:10:26.688800Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T17:10:26.688813Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T17:10:26.688825Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T17:10:26.688874Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:10:26.688894Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T17:10:26.688913Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T17:10:26.688934Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T17:10:26.688962Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T17:10:26.688993Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T17:10:26.689014Z node 1 :TX_DATASHARD TR ... 
627371Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T17:10:28.627406Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:6] at 9437184 on unit FinishPropose 2025-04-17T17:10:28.627438Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:10:28.630535Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 } 2025-04-17T17:10:28.630589Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-17T17:10:28.630829Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:300:2282], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:28.630859Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:28.630896Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:299:2281], serverId# [3:300:2282], sessionId# [0:0:0] 2025-04-17T17:10:28.631085Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\351\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4e\005\'?8\003\013?>\003?\000\003?@\000\003?B\000\006\004?F\003\203\014\000\003\203\014\000\003\003?H\000\377\007\002\000\005?\032\005?\026?r\000\005?\030\003\005? 
\005?\034?r\000\006\000?\036\003?x\005?&\006\0 2025-04-17T17:10:28.636788Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:10:28.636853Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:10:28.637394Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit CheckDataTx 2025-04-17T17:10:28.637462Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-04-17T17:10:28.637497Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit CheckDataTx 2025-04-17T17:10:28.637531Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T17:10:28.637576Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T17:10:28.637612Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-17T17:10:28.637656Z node 3 :TX_DATASHARD TRACE: Activated operation [0:8] at 9437184 2025-04-17T17:10:28.637690Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-04-17T17:10:28.637711Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-17T17:10:28.637731Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit ExecuteDataTx 2025-04-17T17:10:28.637752Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit ExecuteDataTx 2025-04-17T17:10:28.638183Z node 3 :TX_DATASHARD TRACE: Executed operation [0:8] at tablet 9437184 with status COMPLETE 2025-04-17T17:10:28.638241Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:8] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 2, SelectRangeBytes: 31, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T17:10:28.638285Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-04-17T17:10:28.638309Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit ExecuteDataTx 2025-04-17T17:10:28.638334Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit FinishPropose 2025-04-17T17:10:28.638357Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit FinishPropose 2025-04-17T17:10:28.638399Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 8 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-17T17:10:28.638457Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is DelayComplete 2025-04-17T17:10:28.638484Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit FinishPropose 2025-04-17T17:10:28.638515Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit CompletedOperations 2025-04-17T17:10:28.638544Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit CompletedOperations 2025-04-17T17:10:28.638578Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-04-17T17:10:28.638599Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit CompletedOperations 2025-04-17T17:10:28.638622Z node 3 
:TX_DATASHARD TRACE: Execution plan for [0:8] at 9437184 has finished 2025-04-17T17:10:28.638666Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T17:10:28.638698Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:8] at 9437184 on unit FinishPropose 2025-04-17T17:10:28.638730Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> TInterconnectTest::TestConnectAndDisconnect >> TInterconnectTest::TestManyEvents [GOOD] >> TInterconnectTest::TestCrossConnect >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] >> TKesusTest::TestSessionTimeoutAfterDetach [GOOD] >> TKesusTest::TestSessionTimeoutAfterReboot >> TKeyValueTest::TestWriteLongKey [GOOD] >> TInterconnectTest::TestBlobEvent220Bytes [GOOD] >> TInterconnectTest::TestAddressResolve >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig >> TActorActivity::Basic [GOOD] >> ActorBootstrapped::TestBootstrapped >> TKeyValueTest::TestBasicWriteRead [GOOD] >> TKeyValueTest::TestBasicWriteReadOverrun >> TInterconnectTest::TestConnectAndDisconnect [GOOD] >> TInterconnectTest::TestBlobEventPreSerialized >> TestProtocols::TestHTTPRequest [GOOD] >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestPingPongThroughSubChannel >> ActorBootstrapped::TestBootstrapped [GOOD] >> ActorBootstrapped::TestBootstrappedParent |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] Test command err: 2025-04-17T17:10:28.163787Z node 4 :INTERCONNECT WARN: Handshake [4:20:2056] [node 3] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-04-17T17:10:29.555135Z node 5 :INTERCONNECT WARN: Handshake [5:18:2057] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-04-17T17:10:31.821372Z node 7 :INTERCONNECT ERROR: Proxy [7:1:2048] [node 8] ICP35 event processing took too much time PendingNodeInfo:256(55725924ns) {Configure:104(55523151ns) {ProcessConfigured:134(11938914ns) {StartInitialHandshake:156(11936669ns) {DropHandshakes:464(3054ns) {DropIncomingHandshake:436(510ns) DropOutgoingHandshake:451(341ns)} GenerateSessionVirtualId:979(752ns) PrepareNewSessionHandshake:471(2820ns) {SwitchToState:173(1653ns)}}}}} 2025-04-17T17:10:31.902775Z node 8 :INTERCONNECT WARN: Handshake [8:20:2056] [node 7] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-04-17T17:10:31.937691Z node 7 :INTERCONNECT WARN: Handshake [7:18:2057] [node 8] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: 
[2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:78:2110] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:82:2057] recipient: [4:78:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:81:2111] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:77:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:81:2057] recipient: [5:79:2110] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:83:2057] recipient: [5:79:2110] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:82:2111] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:136:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:82:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:86:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:85:2114] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:81:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:84:2113] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:87:2057] recipient: [8:84:2113] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:86:2114] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:140:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] >> TKesusTest::TestSessionTimeoutAfterUnregister [GOOD] >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed >> TKesusTest::TestAcquireTimeout [GOOD] >> TKesusTest::TestAcquireSharedBlocked >> TInterconnectTest::TestSimplePingPong >> TInterconnectTest::TestAddressResolve [GOOD] >> TInterconnectTest::OldNbs >> ActorBootstrapped::TestBootstrappedParent [GOOD] >> TActorTracker::Basic >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi >> TInterconnectTest::TestBlobEventPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventUpToMebibytes >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TestProtocols::TestHTTPRequest [GOOD] >> TInterconnectTest::TestSimplePingPong [GOOD] >> TInterconnectTest::TestSubscribeByFlag >> TKesusTest::TestAcquireLocks [GOOD] >> TKesusTest::TestAcquireRepeat |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] >> TActorTracker::Basic [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_Invalid_Encode [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] >> TInterconnectTest::TestBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestBlobEventsThroughSubChannels >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] >> TInterconnectTest::TestSubscribeByFlag [GOOD] >> TInterconnectTest::TestReconnect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TActorTracker::Basic [GOOD] Test command err: ASYNC_DESTROYER >> TKesusTest::TestAcquireSharedBlocked [GOOD] >> TKesusTest::TestAcquireTimeoutAfterReboot |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] Test command err: 2025-04-17T17:10:06.319314Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:06.319406Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:06.336231Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:06.336479Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:06.363984Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:06.883685Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:06.883742Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:06.899101Z node 2 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:06.899481Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:06.927558Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:08.173193Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:08.173266Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:08.187732Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:08.188308Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:08.230225Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:08.232247Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=6704183254874885151, session=0, seqNo=0) 2025-04-17T17:10:08.232736Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:08.250534Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=6704183254874885151, session=1) 2025-04-17T17:10:08.252257Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:139:2163], cookie=8454407849157741754) 2025-04-17T17:10:08.252770Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:139:2163], cookie=8454407849157741754) 2025-04-17T17:10:08.708883Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:08.729261Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:09.133015Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:09.156148Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:09.530032Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:09.542121Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:09.905052Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:09.916541Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:10.312327Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:10.327766Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:10.708619Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:10.728866Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:11.092362Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:11.103854Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:11.451088Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:11.472653Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:11.848884Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:11.869007Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:12.313159Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:12.331186Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:12.769112Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:12.797183Z 
node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:13.224740Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:13.244896Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:13.672101Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:13.692217Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:14.110339Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:14.125837Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:14.564732Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:14.581364Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:15.024714Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:15.041177Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:15.448333Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:15.468937Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:15.860349Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:15.880780Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:16.304428Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:16.323941Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:16.764470Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:16.780324Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:17.200715Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:17.218928Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:17.627838Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:17.644124Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:18.033105Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:18.050786Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:18.419804Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:18.434166Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:18.794483Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:18.806205Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:19.167858Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:19.179391Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:19.530174Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:19.542315Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:19.888406Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:19.901990Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:20.289745Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:20.308717Z node 3 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:20.748416Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:20.770537Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:21.192462Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:21.217001Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:21.636800Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:21.653354Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:22.060406Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:22.084820Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:22.483604Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:22.511362Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:22.932374Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:22.955877Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:23.368993Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:23.399003Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:23.820342Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:23.840858Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:24.336657Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:24.357255Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:24.770948Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:24.791199Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:25.256981Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:25.282861Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:25.692565Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:25.711258Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:26.125461Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:26.144521Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:26.519420Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:26.531674Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:26.881849Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:26.893355Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:27.290302Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:27.302858Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:27.659369Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:27.672048Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:28.047661Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:28.060695Z node 3 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:28.413091Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:28.427738Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:28.772470Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:28.784450Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:29.212658Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:29.232997Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:29.638417Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:29.667202Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:30.110956Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:30.129000Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:30.541881Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:30.559473Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:30.984674Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:31.012984Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:31.475348Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:31.494087Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:31.892722Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:31.922227Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:32.327441Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:32.356844Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:32.811719Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:32.829169Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:33.225811Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:33.240627Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:33.729628Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-17T17:10:33.731975Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-17T17:10:33.761799Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-17T17:10:33.808604Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:470:2477], cookie=769562774360531881) 2025-04-17T17:10:33.809333Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:470:2477], cookie=769562774360531881) 2025-04-17T17:10:38.286804Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:38.287279Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:38.332647Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:38.332938Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:38.377070Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 
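The Kesus entries above all follow one pairing: every tablet-local transaction (TTxInit, TTxSelfCheck, TTxSessionAttach, TTxSessionTimeout) logs an Execute and then a Complete. Session 1 is attached at 17:10:08, TTxSelfCheck fires roughly every 0.4 s, and after about 25 s of checks TTxSessionTimeout deletes the idle session. Below is a minimal sketch of that two-phase pattern, assuming a simplified interface: everything except the TTx* names taken from the log is invented for illustration and is not YDB's real tablet framework.

    #include <iostream>

    // Illustrative two-phase local transaction (not YDB's actual interfaces):
    // Execute stages the change against local state and may be restarted;
    // Complete runs once the change is considered durable and is where
    // replies and follow-up events (like scheduling the next check) go.
    struct ITransaction {
        virtual ~ITransaction() = default;
        virtual bool Execute() = 0;   // phase 1: return false to request a restart
        virtual void Complete() = 0;  // phase 2: side effects after durability
    };

    struct TTxSelfCheck final : ITransaction {
        bool Execute() override {
            std::cout << "TTxSelfCheck::Execute\n";   // e.g. look for expired sessions
            return true;
        }
        void Complete() override {
            std::cout << "TTxSelfCheck::Complete\n";  // e.g. schedule the next check
        }
    };

    // Toy executor: retry Execute until it sticks, then run Complete once.
    void Run(ITransaction& tx) {
        while (!tx.Execute()) { /* transaction asked to be re-executed */ }
        tx.Complete();
    }

    int main() {
        TTxSelfCheck tx;
        Run(tx);  // prints the Execute/Complete pair, mirroring each log pair
    }

Splitting work this way is what lets Execute be retried safely (as the datashard entries further down show with their "retry" hops) while Complete, which may send messages to other actors, runs exactly once.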
2025-04-17T17:10:38.400808Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=123117759188667349, path="Root", config={ MaxUnitsPerSecond: 100 }) 2025-04-17T17:10:38.402041Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:10:38.436932Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], cookie=123117759188667349) 2025-04-17T17:10:38.455244Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:139:2163]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:10:38.455508Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:139:2163], cookie=0) 2025-04-17T17:10:38.456003Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:141:2165]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:10:38.456029Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:141:2165], cookie=0) 2025-04-17T17:10:38.513145Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:141:2165]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-04-17T17:10:38.513435Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:139:2163]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-04-17T17:10:38.514400Z node 4 :KESUS_TABLET TRACE: Got TEvServerDisconnected([4:144:2168]) 2025-04-17T17:10:38.515755Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:141:2165]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 StateNotification { Status: SESSION_EXPIRED Issues { message: "Disconected." } } } } 2025-04-17T17:10:38.564504Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:139:2163]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 10 StateNotification { Status: SUCCESS } } } >> KqpStreamLookup::ReadTableDuringSplit >> TInterconnectTest::OldNbs [GOOD] >> TInterconnectTest::TestReconnect [GOOD] >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] >> TTxDataShardMiniKQL::WriteValueTooLarge [GOOD] >> TTxDataShardMiniKQL::WriteLargeExternalBlob |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldNbs [GOOD] >> TKesusTest::TestAcquireRepeat [GOOD] >> TKesusTest::TestAcquireDowngrade >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite [GOOD] >> TKesusTest::TestAcquireSemaphoreRebootTimeout |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] >> TKesusTest::TestAcquireDowngrade [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] Test command err: 2025-04-17T17:10:40.697047Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @201 (null) -> PendingActivation 2025-04-17T17:10:40.697675Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP01 ready to work 2025-04-17T17:10:40.699749Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @201 (null) -> PendingActivation 2025-04-17T17:10:40.699961Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP01 ready to work 2025-04-17T17:10:40.710026Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @99 PendingActivation -> PendingNodeInfo 2025-04-17T17:10:40.715715Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP02 configured for host ::1:8634 2025-04-17T17:10:40.717359Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @483 PendingNodeInfo -> PendingConnection 2025-04-17T17:10:40.720779Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH01 starting outgoing handshake 2025-04-17T17:10:40.723054Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2025-04-17T17:10:40.735690Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH05 connected to peer 2025-04-17T17:10:40.750834Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:46104 2025-04-17T17:10:40.755422Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 0] ICH02 starting incoming handshake 2025-04-17T17:10:40.763120Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH07 SendExBlock ExRequest Protocol: 2 ProgramPID: 20730 ProgramStartTime: 1008244909666 Serial: 3208274602 ReceiverNodeId: 6 SenderActorId: "[5:3208274602:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 20730" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 20730" AcceptUUID: "Cluster for process with id: 20730" } 
RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: "\036\341X\346\371\357\233\263-\255\260%\003A\"p\355%G\250=\265D(\320\244\022\207\306\313\020\017" RequestXxhash: true RequestXdcShuffle: true 2025-04-17T17:10:40.767324Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 5] ICH07 ReceiveExBlock ExRequest Protocol: 2 ProgramPID: 20730 ProgramStartTime: 1008244909666 Serial: 3208274602 ReceiverNodeId: 6 SenderActorId: "[5:3208274602:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 20730" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 20730" AcceptUUID: "Cluster for process with id: 20730" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: "\036\341X\346\371\357\233\263-\255\260%\003A\"p\355%G\250=\265D(\320\244\022\207\306\313\020\017" RequestXxhash: true RequestXdcShuffle: true 2025-04-17T17:10:40.768553Z node 6 :INTERCONNECT WARN: Handshake [6:21:2057] [node 5] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-04-17T17:10:40.770682Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @99 PendingActivation -> PendingNodeInfo 2025-04-17T17:10:40.779373Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP02 configured for host ::1:11945 2025-04-17T17:10:40.779628Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP17 incoming handshake (actor [6:21:2057]) 2025-04-17T17:10:40.780051Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @483 PendingNodeInfo -> PendingConnection 2025-04-17T17:10:40.780431Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP07 issued incoming handshake reply 2025-04-17T17:10:40.781229Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP08 No active sessions, becoming PendingConnection 2025-04-17T17:10:40.781990Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @220 PendingConnection -> PendingConnection 2025-04-17T17:10:40.785593Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 5] ICH07 SendExBlock ExReply Success { Protocol: 2 ProgramPID: 20730 ProgramStartTime: 1008868125960 Serial: 1882142571 SenderActorId: "[6:1882142571:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 20730" AcceptUUID: "Cluster for process with id: 20730" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true UseXdcShuffle: true } 2025-04-17T17:10:40.787725Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH07 ReceiveExBlock ExReply Success { Protocol: 2 ProgramPID: 20730 ProgramStartTime: 1008868125960 Serial: 1882142571 SenderActorId: "[6:1882142571:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 20730" AcceptUUID: "Cluster for process with id: 20730" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true UseXdcShuffle: true } 2025-04-17T17:10:40.788656Z node 5 :INTERCONNECT WARN: Handshake [5:19:2057] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-04-17T17:10:40.789697Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2025-04-17T17:10:40.790576Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: 
"\036\341X\346\371\357\233\263-\255\260%\003A\"p\355%G\250=\265D(\320\244\022\207\306\313\020\017" 2025-04-17T17:10:40.790671Z node 5 :INTERCONNECT INFO: Handshake [5:19:2057] [node 6] ICH04 handshake succeeded 2025-04-17T17:10:40.790928Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2025-04-17T17:10:40.790989Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:19:2057] poison: false 2025-04-17T17:10:40.791032Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @350 PendingConnection -> StateWork 2025-04-17T17:10:40.791159Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP22 created new session: [5:23:2048] 2025-04-17T17:10:40.791213Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS09 handshake done sender: [5:19:2057] self: [5:3208274602:0] peer: [6:1882142571:0] socket: 24 2025-04-17T17:10:40.791271Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS10 traffic start 2025-04-17T17:10:40.791348Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS11 registering socket in PollerActor 2025-04-17T17:10:40.791407Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 0 2025-04-17T17:10:40.791443Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2025-04-17T17:10:40.791499Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 0 2025-04-17T17:10:40.791549Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS04 subscribe for session state for [5:17:2056] 2025-04-17T17:10:40.791671Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:24:2048] [node 6] ICIS01 InputSession created 2025-04-17T17:10:40.791715Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:24:2048] [node 6] ICIS02 ReceiveData called 2025-04-17T17:10:40.791784Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:24:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T17:10:40.791875Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:24:2048] [node 6] ICIS02 ReceiveData called 2025-04-17T17:10:40.791908Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:24:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T17:10:40.791957Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:24:2048] [node 6] ICIS02 ReceiveData called 2025-04-17T17:10:40.791988Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:24:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T17:10:40.792033Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 0 2025-04-17T17:10:40.792066Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 0 2025-04-17T17:10:40.792143Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 0 2025-04-17T17:10:40.792191Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 0 2025-04-17T17:10:40.792256Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:46120 2025-04-17T17:10:40.792633Z node 6 :INTERCONNECT DEBUG: Handshake [6:25:2058] [node 0] ICH02 starting incoming handshake 2025-04-17T17:10:40.793574Z node 6 :INTERCONNECT INFO: Handshake [6:21:2057] [node 5] ICH04 handshake succeeded 2025-04-17T17:10:40.793746Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP19 incoming handshake succeeded 2025-04-17T17:10:40.793798Z node 6 :INTERCONNECT 
DEBUG: Proxy [6:9:2048] [node 5] ICP111 dropped incoming handshake: [6:21:2057] poison: false 2025-04-17T17:10:40.793848Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @350 PendingConnection -> StateWork 2025-04-17T17:10:40.793935Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP22 created new session: [6:26:2048] 2025-04-17T17:10:40.793982Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS09 handshake done sender: [6:21:2057] self: [6:1882142571:0] peer: [5:3208274602:0] socket: 25 2025-04-17T17:10:40.794016Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS10 traffic start 2025-04-17T17:10:40.794073Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS11 registering socket in PollerActor 2025-04-17T17:10:40.794122Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-04-17T17:10:40.794152Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2025-04-17T17:10:40.794195Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-04-17T17:10:40.794277Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS01 InputSession created 2025-04-17T17:10:40.794307Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS02 ReceiveData called 2025-04-17T17:10:40.794378Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T17:10:40.794437Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS02 ReceiveData called 2025-04-17T17:10:40.794462Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T17:10:40.794510Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS02 ReceiveData called 2025-04-17T17:10:40.794541Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T17:10:40.794594Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-04-17T17:10:40.794621Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-04-17T17:10:40.794676Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-04-17T17:10:40.794702Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-04-17T17:10:40.794806Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS02 send event from: [5:17:2056] to: [6:18:2056] 2025-04-17T17:10:40.794946Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS22 outgoing packet Serial# 1 Confirm# 0 DataSize# 84 InflightDataAmount# 84 2025-04-17T17:10:40.795027Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 0 2025-04-17T17:10:40.795538Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS02 ReceiveData called 2025-04-17T17:10:40.795606Z nod ... 
NNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS07 socket disconnect 25 reason# ECONNRESET 2025-04-17T17:10:40.799400Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS25 shutdown socket, reason# ECONNRESET 2025-04-17T17:10:40.799493Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS15 start handshake 2025-04-17T17:10:40.799935Z node 6 :INTERCONNECT DEBUG: Handshake [6:29:2059] [node 5] ICH01 starting outgoing handshake 2025-04-17T17:10:40.800068Z node 6 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2025-04-17T17:10:40.800228Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2025-04-17T17:10:40.801102Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:46128 2025-04-17T17:10:40.801217Z node 5 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:38676 2025-04-17T17:10:40.801608Z node 6 :INTERCONNECT DEBUG: Handshake [6:29:2059] [node 5] ICH05 connected to peer 2025-04-17T17:10:40.801741Z node 6 :INTERCONNECT DEBUG: Handshake [6:29:2059] [node 5] ICH07 SendExBlock ExRequest HandshakeId: "\276P\312\326\325\\\376\027\025\233\003\035\260L\231\261|\236\203\272\023*\251\322\300\321\000@\007\004\336\234" 2025-04-17T17:10:40.802130Z node 6 :INTERCONNECT DEBUG: Handshake [6:32:2060] [node 0] ICH02 starting incoming handshake 2025-04-17T17:10:40.803466Z node 5 :INTERCONNECT DEBUG: Handshake [5:28:2058] [node 6] ICH05 connected to peer 2025-04-17T17:10:40.804270Z node 5 :INTERCONNECT DEBUG: Handshake [5:28:2058] [node 6] ICH07 SendExBlock ExRequest HandshakeId: "V\232 P\245Z\275ZS{\031\213n\236\001\333\226\022\010&\314\204\017\2521\206F\364G\315J\t" 2025-04-17T17:10:40.808269Z node 5 :INTERCONNECT DEBUG: Handshake [5:33:2059] [node 0] ICH02 starting incoming handshake 2025-04-17T17:10:40.812429Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP09 (actor [6:32:2060]) from: [5:3208274602:0] for: [6:1882142571:0] 2025-04-17T17:10:40.812741Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS08 incoming handshake Self# [5:3208274602:0] Peer# [6:1882142571:0] Counter# 1 LastInputSerial# 1 2025-04-17T17:10:40.813332Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP07 issued incoming handshake reply 2025-04-17T17:10:40.813895Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP09 (actor [5:33:2059]) from: [6:1882142571:0] for: [5:3208274602:0] 2025-04-17T17:10:40.813934Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS08 incoming handshake Self# [6:1882142571:0] Peer# [5:3208274602:0] Counter# 1 LastInputSerial# 1 2025-04-17T17:10:40.813968Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP06 reply for incoming handshake (actor [5:33:2059]) is held 2025-04-17T17:10:40.815588Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2025-04-17T17:10:40.817859Z node 5 :INTERCONNECT DEBUG: Handshake [5:28:2058] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: "V\232 P\245Z\275ZS{\031\213n\236\001\333\226\022\010&\314\204\017\2521\206F\364G\315J\t" 2025-04-17T17:10:40.819364Z node 5 :INTERCONNECT INFO: Handshake [5:28:2058] [node 6] ICH04 handshake succeeded 2025-04-17T17:10:40.821332Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2025-04-17T17:10:40.821389Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP111 dropped incoming handshake: [5:33:2059] poison: true 2025-04-17T17:10:40.821441Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: 
[5:28:2058] poison: false 2025-04-17T17:10:40.821482Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @350 StateWork -> StateWork 2025-04-17T17:10:40.821527Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS09 handshake done sender: [5:28:2058] self: [5:3208274602:0] peer: [6:1882142571:0] socket: 29 2025-04-17T17:10:40.821572Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS10 traffic start 2025-04-17T17:10:40.821644Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS11 registering socket in PollerActor 2025-04-17T17:10:40.821695Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 1 2025-04-17T17:10:40.821730Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS06 rewind SendQueue size# 1 LastConfirmed# 1 NextSerial# 2 2025-04-17T17:10:40.821808Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 1 2025-04-17T17:10:40.821894Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:46136 2025-04-17T17:10:40.822319Z node 6 :INTERCONNECT DEBUG: Handshake [6:36:2061] [node 0] ICH02 starting incoming handshake 2025-04-17T17:10:40.822906Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS01 InputSession created 2025-04-17T17:10:40.824072Z node 6 :INTERCONNECT NOTICE: Proxy [6:9:2048] [node 5] ICP25 outgoing handshake failed, temporary: 0 explanation: outgoing handshake Peer# ::1(::1:11945) Socket error# connection unexpectedly closed state# ReceiveResponse processed# 0 remain# 52 incoming: [6:32:2060] held: no 2025-04-17T17:10:40.824133Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP052 dropped outgoing handshake: [6:29:2059] poison: false 2025-04-17T17:10:40.824172Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP28 other handshake is still going on 2025-04-17T17:10:40.824217Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS02 ReceiveData called 2025-04-17T17:10:40.824282Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T17:10:40.824813Z node 6 :INTERCONNECT INFO: Handshake [6:32:2060] [node 5] ICH04 handshake succeeded 2025-04-17T17:10:40.825014Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP19 incoming handshake succeeded 2025-04-17T17:10:40.825054Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP111 dropped incoming handshake: [6:32:2060] poison: false 2025-04-17T17:10:40.825094Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @350 StateWork -> StateWork 2025-04-17T17:10:40.825132Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS09 handshake done sender: [6:32:2060] self: [6:1882142571:0] peer: [5:3208274602:0] socket: 30 2025-04-17T17:10:40.825166Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS10 traffic start 2025-04-17T17:10:40.825218Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS11 registering socket in PollerActor 2025-04-17T17:10:40.825250Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-04-17T17:10:40.825281Z node 6 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 5] ICOCH98 Dropping confirmed messages 2025-04-17T17:10:40.825325Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS24 exit InflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 1 packets 2025-04-17T17:10:40.825370Z node 6 :INTERCONNECT_SESSION DEBUG: 
Session [6:26:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 1 NextSerial# 2 2025-04-17T17:10:40.825394Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-04-17T17:10:40.825470Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS01 InputSession created 2025-04-17T17:10:40.825495Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS02 ReceiveData called 2025-04-17T17:10:40.825536Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T17:10:40.825567Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS02 ReceiveData called 2025-04-17T17:10:40.825642Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# 106 num# 1 err# 2025-04-17T17:10:40.825705Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T17:10:40.825734Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T17:10:40.825768Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS02 ReceiveData called 2025-04-17T17:10:40.825787Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T17:10:40.825962Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS02 ReceiveData called 2025-04-17T17:10:40.825984Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T17:10:40.826013Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 1 2025-04-17T17:10:40.826043Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 1 2025-04-17T17:10:40.826135Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS02 ReceiveData called 2025-04-17T17:10:40.826166Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T17:10:40.826195Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 1 2025-04-17T17:10:40.826213Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 1 2025-04-17T17:10:40.826245Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-04-17T17:10:40.826276Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-04-17T17:10:40.826313Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS02 send event from: [6:18:2056] to: [5:17:2056] 2025-04-17T17:10:40.826368Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS22 outgoing packet Serial# 2 Confirm# 2 DataSize# 84 InflightDataAmount# 84 2025-04-17T17:10:40.826419Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-04-17T17:10:40.826441Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-04-17T17:10:40.826477Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-04-17T17:10:40.826501Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS02 ReceiveData called 2025-04-17T17:10:40.826528Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# 106 num# 1 err# 2025-04-17T17:10:40.826582Z node 5 
:INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T17:10:40.826627Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 2 2025-04-17T17:10:40.826648Z node 5 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 6] ICOCH98 Dropping confirmed messages 2025-04-17T17:10:40.826705Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS24 exit InflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 1 packets 2025-04-17T17:10:40.826746Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 2 2025-04-17T17:10:40.826803Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS01 socket: 29 reason# 2025-04-17T17:10:40.826839Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP30 unregister session Session# [5:23:2048] VirtualId# [5:3208274602:0] 2025-04-17T17:10:40.826885Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @201 StateWork -> PendingActivation 2025-04-17T17:10:40.826915Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS25 shutdown socket, reason# 2025-04-17T17:10:40.826988Z node 5 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 6] ICOCH89 Notyfying about Undelivered messages! NotYetConfirmed size: 0, Queue size: 0 >> AggregateStatistics::ShouldBePings >> GenericFederatedQuery::ClickHouseManagedSelectAll >> AggregateStatistics::ShouldBePings [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] Test command err: 2025-04-17T17:10:42.354327Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-17T17:10:42.360724Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2 2025-04-17T17:10:42.360778Z node 1 :STATISTICS DEBUG: Tablet 2 is not local. 2025-04-17T17:10:42.360864Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-04-17T17:10:42.361485Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = ERROR 2025-04-17T17:10:42.361519Z node 1 :STATISTICS DEBUG: Tablet 1 is not local. 2025-04-17T17:10:42.361615Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-04-17T17:10:42.361631Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-17T17:10:42.361657Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = ERROR 2025-04-17T17:10:42.361671Z node 1 :STATISTICS DEBUG: Tablet 4 is not local. 2025-04-17T17:10:42.361715Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5 2025-04-17T17:10:42.361736Z node 1 :STATISTICS DEBUG: Tablet 5 is not local. 2025-04-17T17:10:42.361790Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 6 2025-04-17T17:10:42.361840Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = ERROR 2025-04-17T17:10:42.361855Z node 1 :STATISTICS DEBUG: Tablet 7 is not local. 
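The statistics-service entries here show the aggregation loop: the node probes each tablet, logs "Tablet N is not local." for tablets it cannot serve, skips clients that connect with an ERROR status, collects TEvStatisticsResponse from the rest, and finally sends a single aggregate response upstream. The sketch below is a loose illustration of that skip-and-aggregate shape; the type names and the status simplification are assumptions, not the real YDB actors or protocol.

    #include <cstdint>
    #include <iostream>
    #include <vector>

    // Simplified per-tablet probe outcome; the real service distinguishes
    // more cases (client vs. server errors, disconnects, retries).
    enum class EStatus { Success, Error, NotLocal };

    struct TTabletProbe {
        uint64_t TabletId;
        EStatus Status;
    };

    int main() {
        // Hypothetical probe results in the spirit of the log above.
        std::vector<TTabletProbe> probes = {
            {1, EStatus::Error},    {2, EStatus::NotLocal}, {3, EStatus::Success},
            {4, EStatus::NotLocal}, {5, EStatus::NotLocal}, {6, EStatus::Success},
            {7, EStatus::NotLocal}, {8, EStatus::NotLocal},
        };

        std::vector<uint64_t> answered;  // tablets that produced a response
        for (const auto& p : probes) {
            switch (p.Status) {
                case EStatus::Success:
                    answered.push_back(p.TabletId);  // "Received TEvStatisticsResponse"
                    break;
                case EStatus::NotLocal:
                    std::cout << "Tablet " << p.TabletId << " is not local.\n";
                    break;
                case EStatus::Error:
                    std::cout << "Skip EvClientConnected\n";  // tolerate broken clients
                    break;
            }
        }

        // One response goes back to the requesting node regardless of how
        // many tablets could actually answer.
        std::cout << "Send aggregate statistics response, answered tablets: "
                  << answered.size() << "\n";
    }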
2025-04-17T17:10:42.361879Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2025-04-17T17:10:42.361891Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-17T17:10:42.361910Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:15:2062], server id = [1:15:2062], tablet id = 8 2025-04-17T17:10:42.361923Z node 1 :STATISTICS DEBUG: Tablet 8 is not local. 2025-04-17T17:10:42.361940Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 >> GenericFederatedQuery::IcebergHiveBasicSelectAll |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectAll ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] Test command err: 2025-04-17T17:10:27.139651Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:10:27.151079Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:10:27.151529Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T17:10:27.151748Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:10:27.191984Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:10:27.197095Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:10:27.268286Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:10:27.269835Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T17:10:27.269904Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T17:10:27.269960Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T17:10:27.270268Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:10:27.270663Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:10:27.270722Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T17:10:27.270979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:10:27.271016Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:27.334863Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:10:27.377609Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T17:10:27.377796Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:10:27.377901Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T17:10:27.377941Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T17:10:27.377968Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T17:10:27.377995Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:10:27.378105Z 
node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:27.378133Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:27.378309Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T17:10:27.378371Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T17:10:27.378442Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:10:27.378464Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:10:27.378583Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T17:10:27.378613Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T17:10:27.378642Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T17:10:27.378674Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T17:10:27.378695Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:10:27.378757Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:27.378776Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:27.378812Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T17:10:27.380432Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T17:10:27.380471Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:10:27.380531Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:10:27.380628Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T17:10:27.380656Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T17:10:27.380696Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T17:10:27.380730Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:10:27.380751Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T17:10:27.380782Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T17:10:27.380812Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:10:27.380993Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T17:10:27.381015Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T17:10:27.381033Z node 1 :TX_DATASHARD 
TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T17:10:27.381061Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:10:27.381087Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T17:10:27.381112Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T17:10:27.381150Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T17:10:27.381171Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T17:10:27.381184Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T17:10:27.395020Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T17:10:27.395088Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:10:27.395116Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:10:27.395153Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T17:10:27.395206Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T17:10:27.395693Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:27.395747Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:27.395796Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T17:10:27.395883Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-04-17T17:10:27.395926Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:10:27.396056Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-04-17T17:10:27.396102Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-17T17:10:27.396177Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-04-17T17:10:27.396208Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-04-17T17:10:27.399899Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-04-17T17:10:27.399956Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:10:27.400247Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:27.400282Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:27.400330Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:10:27.400365Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:10:27.400394Z 
node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T17:10:27.400440Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-04-17T17:10:27.400479Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-04-17T17:10:27.400519Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-17T17:10:27.400562Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-04-17T17:10:27.400598Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T17:10:27.400641Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-17T17:10:27.400759Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Restart 2025-04-17T17:10:27.400781Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:10:27.400801Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T17:10:27.400826Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T17:10:27.400856Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T17:10:27.401078Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:10:27.401112Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-17T17:10:27.401239Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-04-17T17:10:27.401277Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-17T17:10:27.401324Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T17:10:27.401355Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T17:10:27.401390Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T17:10:27.401442Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:10:27.401462Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEc ... 
at 9437185 2025-04-17T17:10:40.974787Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 4 -> retry Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T17:10:40.974813Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} touch new 0b, 0b lo load (0b in total), 86213808b requested for data (96990534b in total) 2025-04-17T17:10:40.974836Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release tx data 2025-04-17T17:10:40.974854Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} released on update Res{3 10776726b}, Memory{0 dyn 0} 2025-04-17T17:10:40.975056Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} update Res{3 96990534b} type transaction 2025-04-17T17:10:40.975099Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 96990534b}, Memory{0 dyn 96990534} 2025-04-17T17:10:40.975156Z node 3 :RESOURCE_BROKER DEBUG: Update task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:365:2311]) (priority=5 type=transaction resources={0, 96990534} resubmit=1) 2025-04-17T17:10:40.975185Z node 3 :RESOURCE_BROKER DEBUG: Assigning waiting task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:365:2311]) to queue queue_transaction 2025-04-17T17:10:40.975212Z node 3 :RESOURCE_BROKER DEBUG: Allocate resources {0, 96990534} for task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:365:2311]) from queue queue_transaction 2025-04-17T17:10:40.975234Z node 3 :RESOURCE_BROKER DEBUG: Assigning in-fly task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:365:2311]) to queue queue_transaction 2025-04-17T17:10:40.975257Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 16.936776 to 33.873553 (insert task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:365:2311])) 2025-04-17T17:10:40.975306Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 96990534b}, Memory{0 dyn 96990534} 2025-04-17T17:10:40.975342Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:10:40.975359Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit ExecuteDataTx 2025-04-17T17:10:40.977467Z node 3 :TX_DATASHARD DEBUG: tx 5 at 9437184 restored its data 2025-04-17T17:10:41.702713Z node 3 :TX_DATASHARD TRACE: Executed operation [6:5] at tablet 9437184 with status COMPLETE 2025-04-17T17:10:41.702778Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [6:5] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T17:10:41.702824Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:10:41.702850Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit ExecuteDataTx 2025-04-17T17:10:41.702872Z node 3 :TX_DATASHARD TRACE: 
Add [6:5] at 9437184 to execution unit CompleteOperation 2025-04-17T17:10:41.702894Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit CompleteOperation 2025-04-17T17:10:41.703083Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is DelayComplete 2025-04-17T17:10:41.703108Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit CompleteOperation 2025-04-17T17:10:41.703129Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437184 to execution unit CompletedOperations 2025-04-17T17:10:41.703148Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit CompletedOperations 2025-04-17T17:10:41.703182Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is Executed 2025-04-17T17:10:41.703200Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit CompletedOperations 2025-04-17T17:10:41.703219Z node 3 :TX_DATASHARD TRACE: Execution plan for [6:5] at 9437184 has finished 2025-04-17T17:10:41.703252Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:10:41.703271Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T17:10:41.703290Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T17:10:41.703310Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T17:10:41.703380Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2025-04-17T17:10:41.703417Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 96990534b}, Memory{0 dyn 0} 2025-04-17T17:10:41.703542Z node 3 :RESOURCE_BROKER DEBUG: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:254:2225]) (release resources {0, 96990534}) 2025-04-17T17:10:41.703581Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 33.873553 to 16.936776 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:254:2225])) 2025-04-17T17:10:41.703670Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-04-17T17:10:41.703692Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit ExecuteDataTx 2025-04-17T17:10:41.704404Z node 3 :TX_DATASHARD DEBUG: tx 5 at 9437185 restored its data 2025-04-17T17:10:41.951439Z node 3 :TX_DATASHARD TRACE: Executed operation [6:5] at tablet 9437185 with status COMPLETE 2025-04-17T17:10:41.951512Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [6:5] at 9437185: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T17:10:41.951568Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is ExecutedNoMoreRestarts 2025-04-17T17:10:41.951607Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit ExecuteDataTx 2025-04-17T17:10:41.951645Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437185 to execution unit CompleteOperation 2025-04-17T17:10:41.951670Z node 3 :TX_DATASHARD TRACE: Trying to execute 
[6:5] at 9437185 on unit CompleteOperation 2025-04-17T17:10:41.951865Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is DelayComplete 2025-04-17T17:10:41.951896Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit CompleteOperation 2025-04-17T17:10:41.951919Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437185 to execution unit CompletedOperations 2025-04-17T17:10:41.951955Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit CompletedOperations 2025-04-17T17:10:41.951980Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is Executed 2025-04-17T17:10:41.952000Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit CompletedOperations 2025-04-17T17:10:41.952022Z node 3 :TX_DATASHARD TRACE: Execution plan for [6:5] at 9437185 has finished 2025-04-17T17:10:41.952046Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:10:41.952067Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-17T17:10:41.952133Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-17T17:10:41.952157Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-17T17:10:41.952245Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2025-04-17T17:10:41.952291Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 96990534b}, Memory{0 dyn 0} 2025-04-17T17:10:41.952444Z node 3 :RESOURCE_BROKER DEBUG: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:365:2311]) (release resources {0, 96990534}) 2025-04-17T17:10:41.952488Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 16.936776 to 0.000000 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:365:2311])) 2025-04-17T17:10:41.969023Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:10} commited cookie 1 for step 9 2025-04-17T17:10:41.969079Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-17T17:10:41.969107Z node 3 :TX_DATASHARD TRACE: Complete execution for [6:5] at 9437185 on unit CompleteOperation 2025-04-17T17:10:41.969165Z node 3 :TX_DATASHARD DEBUG: Complete [6 : 5] from 9437185 at tablet 9437185 send result to client [3:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T17:10:41.969240Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2025-04-17T17:10:41.969282Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-17T17:10:41.969549Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:10} commited cookie 1 for step 9 2025-04-17T17:10:41.969576Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:10:41.969600Z node 3 :TX_DATASHARD TRACE: Complete execution for [6:5] at 9437184 on unit CompleteOperation 2025-04-17T17:10:41.969638Z node 3 :TX_DATASHARD DEBUG: Complete [6 : 5] from 9437184 at tablet 9437184 send result to client [3:99:2134], exec latency: 1 ms, propose 
latency: 3 ms 2025-04-17T17:10:41.969683Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-17T17:10:41.969717Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:10:41.969900Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [3:343:2311], Recipient [3:454:2396]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2025-04-17T17:10:41.969951Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:10:41.969995Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437185 consumer 9437185 txId 5 2025-04-17T17:10:41.970064Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [3:232:2225], Recipient [3:454:2396]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-17T17:10:41.970086Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:10:41.970106Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 5 |74.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |74.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build >> GenericFederatedQuery::YdbManagedSelectAll ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBePings [GOOD] Test command err: 2025-04-17T17:10:42.629071Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-17T17:10:42.644142Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-17T17:10:42.764652Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 2 2025-04-17T17:10:42.764736Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-17T17:10:42.764784Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-04-17T17:10:42.765407Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:19:2055], server id = [0:0:0], tablet id = 2, status = ERROR 2025-04-17T17:10:42.765446Z node 2 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-17T17:10:42.765486Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:16:2056], server id = [0:0:0], tablet id = 1, status = ERROR 2025-04-17T17:10:42.765540Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-17T17:10:42.765601Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2025-04-17T17:10:42.765633Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 >> UpsertLoad::ShouldCreateTable [GOOD] >> UpsertLoad::ShouldDropCreateTable |74.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |74.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks [GOOD] >> 
TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi |74.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer >> TKeyValueTest::TestRenameWorks [GOOD] >> TKeyValueTest::TestRenameWorksNewApi >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectAll >> GenericFederatedQuery::YdbFilterPushdown >> GenericFederatedQuery::IcebergHiveTokenSelectAll >> GenericFederatedQuery::IcebergHadoopBasicSelectAll >> GenericFederatedQuery::PostgreSQLOnPremSelectAll ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] Test command err: 2025-04-17T17:10:40.131346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:10:40.138272Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:10:40.138508Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001e26/r3tmp/tmpGUqW8v/pdisk_1.dat 2025-04-17T17:10:41.935706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:10:41.987765Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:42.034631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:10:42.034788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:10:42.047443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:10:42.141455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:10:42.436652Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 KeyFrom: 12345 } 2025-04-17T17:10:42.436767Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 KeyFrom: 12345 2025-04-17T17:10:42.440339Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} started# 5 actors each with inflight# 4 2025-04-17T17:10:42.440405Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-17T17:10:42.440457Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-17T17:10:42.440496Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-17T17:10:42.440519Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-17T17:10:42.440540Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-17T17:10:42.443115Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} session: ydb://session/3?node_id=1&id=OTM2Y2RjZWYtMTcxOGRmYmMtZmFhNWY0MmMtN2RmNDhiYTQ= 2025-04-17T17:10:42.444575Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} session: ydb://session/3?node_id=1&id=YTAxZWJjYTYtZTE1MjcyN2UtNmU3ZDM2NDgtYzZiNDdhOGM= 2025-04-17T17:10:42.445895Z node 1 :DS_LOAD_TEST DEBUG: 
TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} session: ydb://session/3?node_id=1&id=NTMzYjg2NTQtYTY3NGI3YjEtNjNiYTI5NjQtNTgzMjE4MGY= 2025-04-17T17:10:42.447041Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} session: ydb://session/3?node_id=1&id=NWM1NjJmZjUtYTEyZDAyOTktYzdlY2NlMmMtNDk0OGViYQ== 2025-04-17T17:10:42.448349Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} session: ydb://session/3?node_id=1&id=MWMxNTM5NjMtMzgwYmMzOGYtMjEwMmI0MjMtNmRkZGJkMDU= 2025-04-17T17:10:42.451976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2634], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:42.452067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:42.452128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:42.452183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:42.452235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:782:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:42.452283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:783:2659], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:42.452342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:42.458606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:10:42.501791Z node 1 :TX_PROXY ERROR: Actor# [1:792:2668] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T17:10:42.502189Z node 1 :TX_PROXY ERROR: Actor# [1:800:2676] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T17:10:42.502693Z node 1 :TX_PROXY ERROR: Actor# [1:801:2677] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T17:10:42.502940Z node 1 :TX_PROXY ERROR: Actor# [1:802:2678] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T17:10:42.671287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:790:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:10:42.671370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:791:2667], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:10:42.671432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:793:2669], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:10:42.671492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:794:2670], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:10:42.671554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:795:2671], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:10:42.711168Z node 1 :TX_PROXY ERROR: Actor# [1:900:2741] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:10:43.639308Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} finished in 1744909843.639264s, errors=0 2025-04-17T17:10:43.642651Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1744909843639 OperationsOK: 4 OperationsError: 0 } 2025-04-17T17:10:43.672613Z node 1 :TX_PROXY ERROR: Actor# [1:973:2779] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:10:43.754431Z node 1 :TX_PROXY ERROR: Actor# [1:1001:2792] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:10:43.936244Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} finished in 1744909843.935992s, errors=0 2025-04-17T17:10:43.937088Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1744909843935 OperationsOK: 4 OperationsError: 0 } 2025-04-17T17:10:43.971001Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} finished in 1744909843.970503s, errors=0 2025-04-17T17:10:43.977317Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1744909843970 OperationsOK: 4 OperationsError: 0 } 2025-04-17T17:10:43.998211Z node 1 :TX_PROXY ERROR: Actor# [1:1072:2820] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:10:44.160939Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} finished in 1744909844.160728s, errors=0 2025-04-17T17:10:44.162891Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1744909844160 OperationsOK: 4 OperationsError: 0 } 2025-04-17T17:10:44.182601Z node 1 :TX_PROXY ERROR: Actor# [1:1123:2842] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:10:44.379290Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} finished in 1744909844.379057s, errors=0 2025-04-17T17:10:44.379962Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1744909844379 OperationsOK: 4 OperationsError: 0 } 2025-04-17T17:10:44.380312Z 
node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} finished in 1.939853s, oks# 20, errors# 0 2025-04-17T17:10:44.380994Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> GenericFederatedQuery::IcebergHadoopTokenSelectAll |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk |74.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |74.3%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |74.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] Test command err: 2025-04-17T17:10:26.491867Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:10:26.500292Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:10:26.500627Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T17:10:26.500815Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:10:26.529577Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:10:26.533737Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:10:26.590250Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:10:26.592441Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T17:10:26.592535Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T17:10:26.592605Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T17:10:26.593004Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:10:26.593513Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:10:26.593610Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T17:10:26.593941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:10:26.594008Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:26.662621Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:10:26.683784Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme 
tabletId 9437184 2025-04-17T17:10:26.683961Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:10:26.684046Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T17:10:26.684072Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T17:10:26.684095Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T17:10:26.684139Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:10:26.684263Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:26.684303Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:26.684509Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T17:10:26.684581Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T17:10:26.684667Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:10:26.684690Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:10:26.684740Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T17:10:26.684778Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T17:10:26.684807Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T17:10:26.684834Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T17:10:26.684860Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:10:26.684936Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:26.684962Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:26.684998Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T17:10:26.686555Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ #\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T17:10:26.686599Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:10:26.686660Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:10:26.686766Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T17:10:26.686817Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T17:10:26.686868Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T17:10:26.686897Z node 1 :TX_DATASHARD TRACE: Execution 
status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:10:26.686920Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T17:10:26.686955Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T17:10:26.686988Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:10:26.687176Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T17:10:26.687197Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T17:10:26.687218Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T17:10:26.687239Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:10:26.687270Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T17:10:26.687293Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T17:10:26.687313Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T17:10:26.687334Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T17:10:26.687359Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T17:10:26.698967Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T17:10:26.699018Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:10:26.699042Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:10:26.699081Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T17:10:26.699133Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T17:10:26.699521Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:26.699561Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:26.699601Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T17:10:26.699690Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T17:10:26.699715Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:10:26.699831Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T17:10:26.699863Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:10:26.699896Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T17:10:26.699922Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T17:10:26.702177Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 
4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T17:10:26.702221Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:10:26.702366Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:26.702397Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:26.702442Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:10:26.702489Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:10:26.702537Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T17:10:26.702564Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T17:10:26.702592Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T17:10:26.702632Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:10:26.702673Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T17:10:26.702721Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T17:10:26.702751Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T17:10:26.702869Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T17:10:26.702891Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:10:26.702905Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T17:10:26.702918Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T17:10:26.702931Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T17:10:26.702966Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:10:26.702984Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T17:10:26.703012Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T17:10:26.703035Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T17:10:26.703074Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T17:10:26.703109Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T17:10:26.703131Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at ... 
v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:10:43.391567Z node 3 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184 2025-04-17T17:10:43.391817Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-17T17:10:43.391836Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-17T17:10:43.391855Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2025-04-17T17:10:43.391873Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-17T17:10:43.391911Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:10:43.392382Z node 3 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 requested 33554432 more memory 2025-04-17T17:10:43.392409Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-17T17:10:43.392696Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:10:43.392933Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-17T17:10:43.393369Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:10:44.124094Z node 3 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2025-04-17T17:10:44.124396Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 7340039, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T17:10:44.124833Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:10:44.125048Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2025-04-17T17:10:44.125305Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2025-04-17T17:10:44.125707Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 2025-04-17T17:10:44.125799Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T17:10:44.125825Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-04-17T17:10:44.126204Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-04-17T17:10:44.126235Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-04-17T17:10:44.126466Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-17T17:10:44.126487Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-04-17T17:10:44.126691Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2025-04-17T17:10:44.444910Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T17:10:44.445160Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-04-17T17:10:44.445397Z node 3 :TX_DATASHARD TRACE: Propose transaction 
complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 5 ms, status: COMPLETE 2025-04-17T17:10:44.445850Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:10:47.223155Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 } 2025-04-17T17:10:47.223218Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-17T17:10:47.223503Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:297:2278], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:47.223531Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:47.223575Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:296:2277], serverId# [3:297:2278], sessionId# [0:0:0] 2025-04-17T17:10:47.400753Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\332\201\200\010\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\004\203\004\203\001H\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000\013?\024\003?\020\251\003\003?\022\006bar\003\005?\030\003?\026\007\000\000\000\001xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx 2025-04-17T17:10:47.403016Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:10:47.403184Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:10:47.464974Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit CheckDataTx 2025-04-17T17:10:47.465079Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2025-04-17T17:10:47.465143Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit CheckDataTx 2025-04-17T17:10:47.465190Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T17:10:47.465230Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T17:10:47.465284Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-17T17:10:47.465353Z node 3 :TX_DATASHARD TRACE: Activated operation [0:3] at 9437184 2025-04-17T17:10:47.465396Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2025-04-17T17:10:47.465422Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit 
BuildAndWaitDependencies 2025-04-17T17:10:47.465445Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit ExecuteDataTx 2025-04-17T17:10:47.465472Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-04-17T17:10:47.465526Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-17T17:10:47.465571Z node 3 :TX_DATASHARD TRACE: Operation [0:3] at 9437184 requested 46269638 more memory 2025-04-17T17:10:47.465607Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Restart 2025-04-17T17:10:47.465743Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:10:47.465785Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-04-17T17:10:47.465835Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-17T17:10:47.499767Z node 3 :TX_DATASHARD TRACE: Operation [0:3] at 9437184 exceeded memory limit 50463942 and requests 403711536 more for the next try 2025-04-17T17:10:47.505986Z node 3 :TX_DATASHARD DEBUG: tx 3 released its data 2025-04-17T17:10:47.506110Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Restart 2025-04-17T17:10:47.506488Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:10:47.506532Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-04-17T17:10:47.571091Z node 3 :TX_DATASHARD DEBUG: tx 3 at 9437184 restored its data 2025-04-17T17:10:47.571185Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-17T17:10:47.657057Z node 3 :TX_DATASHARD TRACE: Executed operation [0:3] at tablet 9437184 with status COMPLETE 2025-04-17T17:10:47.657170Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:3] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 16777223, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T17:10:47.657255Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:10:47.657295Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit ExecuteDataTx 2025-04-17T17:10:47.657338Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit FinishPropose 2025-04-17T17:10:47.657379Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit FinishPropose 2025-04-17T17:10:47.657426Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is DelayComplete 2025-04-17T17:10:47.657456Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit FinishPropose 2025-04-17T17:10:47.657500Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit CompletedOperations 2025-04-17T17:10:47.657538Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit CompletedOperations 
2025-04-17T17:10:47.657594Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2025-04-17T17:10:47.657621Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit CompletedOperations 2025-04-17T17:10:47.657660Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:3] at 9437184 has finished 2025-04-17T17:10:47.745180Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T17:10:47.745246Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:3] at 9437184 on unit FinishPropose 2025-04-17T17:10:47.745296Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 3 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2025-04-17T17:10:47.745380Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:10:47.827111Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T17:10:47.827175Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2025-04-17T17:10:47.832753Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:229:2224], Recipient [3:232:2225]: NKikimr::TEvTablet::TEvFollowerGcApplied ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:78:2110] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:82:2057] recipient: [4:78:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:81:2111] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:77:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:79:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:81:2057] recipient: [5:80:2110] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:83:2057] recipient: [5:80:2110] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:82:2111] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:136:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! 
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:82:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:86:2057] recipient: [7:82:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:85:2114] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:81:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:83:2113] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:87:2057] recipient: [8:83:2113] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:86:2114] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:104:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:83:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:85:2115] Leader for TabletID 72057594037927937 is [10:88:2116] sender: [10:89:2057] recipient: [10:85:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:88:2116] Leader for TabletID 72057594037927937 is [10:88:2116] sender: [10:142:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:84:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:87:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:86:2115] Leader for TabletID 72057594037927937 is [11:89:2116] sender: [11:90:2057] recipient: [11:86:2115] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:89:2116] Leader for TabletID 72057594037927937 is [11:89:2116] sender: [11:143:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:85:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:88:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:89:2057] recipient: [12:87:2116] Leader for TabletID 72057594037927937 is [12:90:2117] sender: [12:91:2057] recipient: [12:87:2116] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:90:2117] Leader for TabletID 72057594037927937 is [12:90:2117] sender: [12:110:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! 
Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:86:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:89:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:90:2057] recipient: [13:88:2117] Leader for TabletID 72057594037927937 is [13:91:2118] sender: [13:92:2057] recipient: [13:88:2117] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:91:2118] Leader for TabletID 72057594037927937 is [13:91:2118] sender: [13:111:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:89:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:92:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:93:2057] recipient: [14:91:2120] Leader for TabletID 72057594037927937 is [14:94:2121] sender: [14:95:2057] recipient: [14:91:2120] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! new actor is[14:94:2121] Leader for TabletID 72057594037927937 is [14:94:2121] sender: [14:148:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:89:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:92:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:93:2057] recipient: [15:91:2120] Leader for TabletID 72057594037927937 is [15:94:2121] sender: [15:95:2057] recipient: [15:91:2120] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! 
new actor is[15:94:2121] Leader for TabletID 72057594037927937 is [15:94:2121] sender: [15:148:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |74.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi |74.3%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |74.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |74.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |74.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |74.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> TKeyValueTest::TestBasicWriteReadOverrun [GOOD] >> TKeyValueTest::TestBlockedEvGetRequest >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk [GOOD] >> TTxDataShardMiniKQL::CrossShard_1_Cycle [GOOD] >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy >> TKeyValueTest::TestWriteReadWhileWriteWorks >> ColumnBuildTest::BaseCase >> TKeyValueTest::TestCopyRangeWorks [GOOD] >> TKeyValueTest::TestCopyRangeWorksNewApi >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
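[annotation] The TestBlockedEvGetRequest block this trace belongs to ends, a few lines below, with "Received BLOCKED EvGetResult ... block race detected" and a boot failure reporting SuggestedGeneration: 0, KnownGeneration: 3. The sketch that follows models the generation-fencing idea behind those messages under stated assumptions; StorageGroup and its methods are invented stand-ins, not the real BlobStorage protocol or its status codes.

```cpp
// Toy model of generation fencing ("blocking"). Invented for
// illustration; not the real BlobStorage protocol.
#include <cstdint>
#include <iostream>

enum class Status { OK, BLOCKED, ALREADY };

struct StorageGroup {
    uint32_t blockedUpTo = 0; // generations below this are fenced off

    // A tablet taking leadership in generation g fences older ones.
    Status Block(uint32_t generation) {
        if (generation < blockedUpTo)
            return Status::ALREADY; // raced against a newer generation
        blockedUpTo = generation;
        return Status::OK;
    }

    // Reads issued under a fenced-off generation come back BLOCKED.
    Status Get(uint32_t generation) const {
        return generation < blockedUpTo ? Status::BLOCKED : Status::OK;
    }
};

int main() {
    StorageGroup group;
    group.Block(3);                     // generation 3 takes over
    // A reader still running as generation 2: "block race detected".
    std::cout << (group.Get(2) == Status::BLOCKED) << "\n";   // prints 1
    // A stale boot attempt loses with ALREADY against known generation 3.
    std::cout << (group.Block(1) == Status::ALREADY) << "\n"; // prints 1
}
```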
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! 
new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:76:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:79:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:80:2057] recipient: [10:78:2110] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:82:2057] recipient: [10:78:2110] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:81:2111] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:135:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:76:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:79:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:80:2057] recipient: [11:78:2110] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:82:2057] recipient: [11:78:2110] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:81:2111] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:135:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:77:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:80:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:81:2057] recipient: [12:79:2110] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:83:2057] recipient: [12:79:2110] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:82:2111] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:136:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:82:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:84:2057] recipient: [13:83:2113] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:86:2057] recipient: [13:83:2113] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:85:2114] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:139:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:83:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:84:2057] recipient: [14:82:2113] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:86:2057] recipient: [14:82:2113] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! 
new actor is[14:85:2114] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:139:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:84:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:85:2057] recipient: [15:83:2113] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:87:2057] recipient: [15:83:2113] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:86:2114] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] 2025-04-17T17:10:50.742766Z node 17 :KEYVALUE ERROR: {KV323@keyvalue_storage_read_request.cpp:254} Received BLOCKED EvGetResult. KeyValue# 72057594037927937 Status# BLOCKED Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 0 ErrorReason# block race detected 2025-04-17T17:10:50.761327Z node 17 :TABLET_MAIN ERROR: Tablet: 72057594037927937 HandleBlockBlobStorageResult, msg->Status: ALREADY, not discovered Marker# TSYS21 2025-04-17T17:10:50.761574Z node 17 :TABLET_MAIN ERROR: Tablet: 72057594037927937 Type: KeyValue, EReason: ReasonBootBSError, SuggestedGeneration: 0, KnownGeneration: 3 Marker# TSYS31 >> TKeyValueTest::TestObtainLockNewApi [GOOD] >> TKeyValueTest::TestRenameToLongKey >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> UpsertLoad::ShouldDropCreateTable [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TTransferTests::Create >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows >> TTransferTests::Create_Disabled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldDropCreateTable [GOOD] Test command err: 2025-04-17T17:10:41.546289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:10:41.546719Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:10:41.546766Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001dc8/r3tmp/tmpNJCK4s/pdisk_1.dat 2025-04-17T17:10:42.029753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:10:42.067277Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:42.105503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:10:42.105645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:10:42.119321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:10:42.199787Z node 1 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# BrandNewTable in dir# /Root 2025-04-17T17:10:42.387740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:645:2552], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:42.387995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:42.403447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:10:42.781744Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# BrandNewTable in dir# /Root with rows# 10 2025-04-17T17:10:42.783220Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:641:2549], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2025-04-17T17:10:42.813407Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:641:2549], subTag: 1} TUpsertActor finished in 0.029898s, errors=0 2025-04-17T17:10:42.813655Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "BrandNewTable" CreateTable: true MinParts: 11 MaxParts: 13 MaxPartSizeMb: 1234 } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-04-17T17:10:42.813775Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:641:2549], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2025-04-17T17:10:42.881834Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:641:2549], subTag: 3} TUpsertActor finished in 0.067655s, errors=0 2025-04-17T17:10:42.881903Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:754:2630] with tag# 3 2025-04-17T17:10:50.327034Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:10:50.328469Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:10:50.328963Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001dc8/r3tmp/tmpYMy8dO/pdisk_1.dat 2025-04-17T17:10:51.279614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:10:51.331297Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:51.386948Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:10:51.387428Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:10:51.405012Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:10:51.522066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:10:51.863958Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 } UpsertBulkStart { RowCount: 100 Inflight: 3 } 2025-04-17T17:10:51.864087Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 2025-04-17T17:10:52.484522Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor finished in 0.619790s, errors=0 2025-04-17T17:10:52.484610Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:739:2621] with tag# 2 2025-04-17T17:10:52.488787Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 drops table# table in dir# /Root 2025-04-17T17:10:52.497466Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:781:2662], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:52.497546Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:52.790271Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# table in dir# /Root 2025-04-17T17:10:52.819971Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:847:2708], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:52.820049Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:52.850581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T17:10:52.932538Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-17T17:10:53.251276Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# table in dir# /Root with rows# 10 2025-04-17T17:10:53.252549Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:777:2659], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2025-04-17T17:10:53.274487Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:777:2659], subTag: 1} TUpsertActor finished in 0.019526s, errors=0 2025-04-17T17:10:53.275542Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "table" DropTable: true } TargetShard { TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-04-17T17:10:53.276280Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:777:2659], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2025-04-17T17:10:53.348208Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:777:2659], subTag: 3} TUpsertActor finished in 0.071287s, errors=0 2025-04-17T17:10:53.348293Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:940:2782] with tag# 3 >> TKeyValueTest::TestWrite200KDeleteThenResponseError [GOOD] >> TKeyValueTest::TestSetExecutorFastLogPolicy >> TKeyValueTest::TestConcatWorks [GOOD] >> TKeyValueTest::TestConcatWorksNewApi >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout [GOOD] >> TKesusTest::TestAcquireSemaphore >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi [GOOD] >> TKeyValueTest::TestWriteDeleteThenReadRemaining ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
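[annotation] The UpsertLoad::ShouldDropCreateTable output above repeatedly parameterizes its actors as RowCount: N Inflight: K — a bounded window of outstanding upserts against one shard. Below is a minimal sketch of that windowing pattern, assuming acks complete immediately and using invented names (RunBulkUpsert); it is not the YDB TUpsertActor.

```cpp
// Sketch of the RowCount/Inflight pattern from the load log above:
// keep at most `inflight` upserts outstanding until `rowCount` finish.
// Toy code with immediate completions, not the real load actor.
#include <cstddef>
#include <functional>
#include <iostream>
#include <queue>

void RunBulkUpsert(std::size_t rowCount, std::size_t inflight,
                   const std::function<void(std::size_t)>& sendUpsert) {
    std::size_t nextRow = 0, outstanding = 0, errors = 0;
    std::queue<std::size_t> completions; // stands in for async acks

    auto pump = [&] { // start requests until the inflight cap is hit
        while (nextRow < rowCount && outstanding < inflight) {
            sendUpsert(nextRow);
            completions.push(nextRow++); // toy: completes immediately
            ++outstanding;
        }
    };

    pump();
    while (!completions.empty()) { // drain an ack, then refill the window
        completions.pop();
        --outstanding;
        pump();
    }
    std::cout << "finished, errors=" << errors << "\n";
}

int main() {
    RunBulkUpsert(10, 3, [](std::size_t row) {
        std::cout << "upsert row " << row << "\n";
    });
}
```

With RowCount: 10 and Inflight: 3 this issues rows 0-2 up front and tops the window back up after each ack, which is what bounds the per-shard load in the runs timed above.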
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! 
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:85:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:87:2116] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:91:2057] recipient: [11:87:2116] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:90:2117] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:110:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:86:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:89:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:90:2057] recipient: [12:88:2117] Leader for TabletID 72057594037927937 is [12:91:2118] sender: [12:92:2057] recipient: [12:88:2117] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:91:2118] Leader for TabletID 72057594037927937 is [12:91:2118] sender: [12:111:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:89:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:92:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:93:2057] recipient: [13:91:2120] Leader for TabletID 72057594037927937 is [13:94:2121] sender: [13:95:2057] recipient: [13:91:2120] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:94:2121] Leader for TabletID 72057594037927937 is [13:94:2121] sender: [13:148:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:89:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:92:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:93:2057] recipient: [14:91:2120] Leader for TabletID 72057594037927937 is [14:94:2121] sender: [14:95:2057] recipient: [14:91:2120] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! new actor is[14:94:2121] Leader for TabletID 72057594037927937 is [14:94:2121] sender: [14:148:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:90:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:93:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:94:2057] recipient: [15:92:2120] Leader for TabletID 72057594037927937 is [15:95:2121] sender: [15:96:2057] recipient: [15:92:2120] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:95:2121] Leader for TabletID 72057594037927937 is [15:95:2121] sender: [15:149:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TKesusTest::TestAcquireSemaphore [GOOD] >> TPQCachingProxyTest::TestDeregister >> TKesusTest::TestAcquireSemaphoreRebootTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreViaDecrease >> TTransferTests::Create_Disabled [GOOD] >> TTransferTests::CreateWithoutCredentials >> TPQCachingProxyTest::MultipleSessions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphore [GOOD] Test command err: 2025-04-17T17:10:04.096862Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:04.097535Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:04.145148Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:04.146264Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:04.193825Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:04.194326Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=7546522789402197796, session=0, seqNo=0) 2025-04-17T17:10:04.194462Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:04.209691Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=7546522789402197796, session=1) 2025-04-17T17:10:04.210652Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=6858715276732730219, session=0, seqNo=0) 2025-04-17T17:10:04.210987Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:10:04.232614Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=6858715276732730219, session=2) 2025-04-17T17:10:04.238692Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:10:04.239921Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:10:04.240564Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:10:04.241118Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=222, session=2, semaphore="Lock2" count=1) 2025-04-17T17:10:04.241165Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-04-17T17:10:04.241495Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-04-17T17:10:04.241565Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=333, session=1, semaphore="Lock2" count=1) 2025-04-17T17:10:04.241757Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2025-04-17T17:10:04.259456Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=111) 2025-04-17T17:10:04.259760Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=222) 2025-04-17T17:10:04.259781Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=333) 2025-04-17T17:10:04.262464Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:148:2172], cookie=4858912079973855306, name="Lock1") 2025-04-17T17:10:04.262770Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:148:2172], cookie=4858912079973855306) 2025-04-17T17:10:04.265226Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:151:2175], cookie=13841892939087130080, name="Lock2") 2025-04-17T17:10:04.265486Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:151:2175], cookie=13841892939087130080) 2025-04-17T17:10:04.322513Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:04.322897Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:04.328087Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:04.332149Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:04.387336Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxInit::Complete 2025-04-17T17:10:04.388141Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:10:04.388308Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-04-17T17:10:04.388446Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2025-04-17T17:10:04.389367Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:191:2205], cookie=16734793176076378805, name="Lock1") 2025-04-17T17:10:04.389720Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:191:2205], cookie=16734793176076378805) 2025-04-17T17:10:04.392041Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:199:2212], cookie=757245348098769154, name="Lock2") 2025-04-17T17:10:04.392255Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:199:2212], cookie=757245348098769154) 2025-04-17T17:10:04.925406Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:04.962337Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:05.363262Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:05.378988Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:05.722675Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:05.734457Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:06.092986Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:06.104368Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:06.479303Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:06.492247Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:06.844300Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:06.855767Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:07.211783Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:07.228933Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:07.592571Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:07.623709Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:08.007649Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:08.025511Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:08.469386Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:08.481189Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:08.904730Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:08.918197Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:09.315023Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:09.332817Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:09.709489Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 
2025-04-17T17:10:09.728858Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:10.108803Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:10.123249Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:10.588905Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:10.609063Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:11.012382Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:11.025161Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:11.404359Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:11.425186Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:11.799846Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:11.816033Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:12.212685Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:12.226074Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:12.672355Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:12.690658Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:13.100325Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:13.120478Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:13.557238Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:13.588090Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:14.016374Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:14.030114Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:14.418686Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:14.436745Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:14.850980Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:14.869596Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:15.264613Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:15.288260Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:15.673478Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:15.687834Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:16.091032Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:16.112967Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:16.524377Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:16.537454Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:16.981452Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:365:2367], cookie=7762898006094520751, name="Lock1") 2025-04-17T17:10:16.981569Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:365:2367], 
cookie=7762898006094520751) 2025-04-17T17:10:16.982169Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:368:2370], cookie=9929886724436662360, name="Lock2") 2025-04-17T17:10:16.982243Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:368:2370], cookie=9929886724436662360) 2025-04-17T17:10:17.038450Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:17.063490Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:17.488409Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:17.512638Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:17.915082Z node 1 :KESUS_TABLET DEBUG: [720575940379 ... 17T17:10:46.911286Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:47.280175Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:47.292809Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:47.677999Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:47.692828Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:48.078514Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:48.092793Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:48.456843Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:48.471352Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:48.859727Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:48.876595Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:49.272043Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:49.292906Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:49.688478Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:49.709448Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:50.124446Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:50.148676Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:50.576362Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:50.590743Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:51.028469Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:51.043658Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:51.472464Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:51.488993Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:51.892400Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:51.912324Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:52.323109Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:52.348079Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:52.740040Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Execute 2025-04-17T17:10:52.755441Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:53.144602Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:53.158076Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:53.542346Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:53.561422Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:53.954576Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:53.969898Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:54.372596Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:54.385343Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:54.767538Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:54.787680Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:55.211558Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-17T17:10:55.211814Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-17T17:10:55.212041Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-04-17T17:10:55.212526Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-04-17T17:10:55.212833Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-04-17T17:10:55.212860Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-17T17:10:55.243246Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-17T17:10:55.245902Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:331:2344], cookie=15850419025376181334, name="Lock1") 2025-04-17T17:10:55.246187Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:331:2344], cookie=15850419025376181334) 2025-04-17T17:10:55.257857Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:334:2347], cookie=7985979010727077041, name="Lock2") 2025-04-17T17:10:55.258342Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:334:2347], cookie=7985979010727077041) 2025-04-17T17:10:55.261063Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:337:2350], cookie=15033261595268564656) 2025-04-17T17:10:55.261295Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:337:2350], cookie=15033261595268564656) 2025-04-17T17:10:55.312311Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:55.312579Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:55.316944Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:55.319407Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:55.377551Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:55.378076Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 
2025-04-17T17:10:55.378347Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-17T17:10:55.379769Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:377:2380], cookie=12136481376460629598) 2025-04-17T17:10:55.380067Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:377:2380], cookie=12136481376460629598) 2025-04-17T17:10:55.386823Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:384:2386], cookie=6433740712269294950, name="Lock1") 2025-04-17T17:10:55.387172Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:384:2386], cookie=6433740712269294950) 2025-04-17T17:10:55.390454Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:387:2389], cookie=9280129071495624146, name="Lock2") 2025-04-17T17:10:55.390923Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:387:2389], cookie=9280129071495624146) 2025-04-17T17:10:58.983120Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:58.983448Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:59.025737Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:59.027414Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:59.075016Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:59.077209Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=14681425935507039986, session=0, seqNo=0) 2025-04-17T17:10:59.078002Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:59.091978Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=14681425935507039986, session=1) 2025-04-17T17:10:59.093218Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=8764262018965838487, session=0, seqNo=0) 2025-04-17T17:10:59.093757Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:10:59.134632Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=8764262018965838487, session=2) 2025-04-17T17:10:59.136060Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=111, session=1, semaphore="Sem1" count=1) 2025-04-17T17:10:59.171589Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=111) 2025-04-17T17:10:59.174216Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:145:2169], cookie=16204769341069436483, name="Sem1", limit=1) 2025-04-17T17:10:59.174793Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-17T17:10:59.202701Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:145:2169], cookie=16204769341069436483) 2025-04-17T17:10:59.204949Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=333, session=1, semaphore="Sem1" count=100500) 2025-04-17T17:10:59.234133Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=333) 
2025-04-17T17:10:59.235639Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=222, session=1, semaphore="Sem1" count=1) 2025-04-17T17:10:59.245689Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-17T17:10:59.246676Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=333, session=2, semaphore="Sem1" count=1) 2025-04-17T17:10:59.279410Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=222) 2025-04-17T17:10:59.279714Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=333) 2025-04-17T17:10:59.282252Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:155:2179], cookie=2492468893415055787, name="Sem1") 2025-04-17T17:10:59.282549Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:155:2179], cookie=2492468893415055787) 2025-04-17T17:10:59.307989Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:158:2182], cookie=10904855920008633720, name="Sem1") 2025-04-17T17:10:59.308574Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:158:2182], cookie=10904855920008633720) 2025-04-17T17:10:59.318882Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:161:2185], cookie=14687379927608581784, name="Sem1", force=0) 2025-04-17T17:10:59.349687Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:161:2185], cookie=14687379927608581784) 2025-04-17T17:10:59.352058Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:166:2190], cookie=9183445064688220648, name="Sem1", force=1) 2025-04-17T17:10:59.356533Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 1 "Sem1" 2025-04-17T17:10:59.389649Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:166:2190], cookie=9183445064688220648)
>> TTransferTests::Create [GOOD]
>> TTransferTests::CreateSequential
>> TKeyValueTest::TestCleanUpDataOnEmptyTablet [GOOD]
>> TKeyValueTest::TestCleanUpDataOnEmptyTabletResetGeneration
>> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD]
>> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD]
>> TKeyValueTest::TestCleanUpDataOnEmptyTabletResetGeneration [GOOD]
>> TKeyValueTest::TestCleanUpDataWithMockDisk
>> TKeyValueTest::TestLargeWriteAndDelete [GOOD]
>> DataShardWrite::UpsertPrepared+Volatile
>> TPQCachingProxyTest::MultipleSessions [GOOD]
>> TPQCachingProxyTest::TestDeregister [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD]
Test command err: 2025-04-17T17:10:05.952984Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:05.953073Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:05.969978Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:05.970209Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:05.994061Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:05.994489Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158],
cookie=7853816623663386640, session=0, seqNo=0) 2025-04-17T17:10:05.994597Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:06.016776Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=7853816623663386640, session=1) 2025-04-17T17:10:06.017069Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=18123802628554962509, session=0, seqNo=0) 2025-04-17T17:10:06.017181Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:10:06.028887Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=18123802628554962509, session=2) 2025-04-17T17:10:06.029368Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[1:143:2167], cookie=10020210292832631695, name="Sem1", limit=1) 2025-04-17T17:10:06.029488Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-17T17:10:06.041038Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[1:143:2167], cookie=10020210292832631695) 2025-04-17T17:10:06.041299Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=111, session=1, semaphore="Sem1" count=1) 2025-04-17T17:10:06.041431Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-17T17:10:06.041583Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=222, session=2, semaphore="Sem1" count=1) 2025-04-17T17:10:06.053135Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=111) 2025-04-17T17:10:06.053220Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=222) 2025-04-17T17:10:06.053679Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:151:2175], cookie=1260697947589309604, name="Sem1") 2025-04-17T17:10:06.053792Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:151:2175], cookie=1260697947589309604) 2025-04-17T17:10:06.054085Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:154:2178], cookie=9082787185300468693, name="Sem1") 2025-04-17T17:10:06.054123Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:154:2178], cookie=9082787185300468693) 2025-04-17T17:10:06.473163Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:06.486539Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:06.835554Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:06.852435Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:07.227540Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:07.247181Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:07.607504Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:07.626670Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:08.044552Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:08.069340Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:08.452423Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:08.464817Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:08.840437Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:08.865152Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:09.263212Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:09.275055Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:09.640855Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:09.660979Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:10.098787Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:10.110450Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:10.516550Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:10.540686Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:10.946127Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:10.961903Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:11.327935Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:11.343553Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:11.716959Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:11.735112Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:12.163419Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:12.179477Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:12.600580Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:12.633232Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:13.072875Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:13.112648Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:13.574650Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:13.604710Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:14.012356Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:14.032641Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:14.460156Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:14.478527Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:15.000753Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:15.041147Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:15.442740Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:15.469255Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:15.864292Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:15.880780Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:16.308378Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:16.321362Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:16.736448Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:16.748645Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:17.196005Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:17.218254Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:17.631819Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:17.652141Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:18.039892Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:18.055091Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:18.440479Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:18.455894Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:18.852053Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:18.864733Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:19.214720Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:19.226800Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:19.588900Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:19.601584Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:19.956187Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:19.979801Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:20.392431Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:20.413763Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:20.804462Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:20.823732Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:21.247036Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:21.273621Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:21.705686Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:21.728563Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:22.140819Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:22.161863Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:22.573020Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:22.593441Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:23.046776Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:23.069370Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:23.481924Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:23.517715Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:23.928949Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:23.954946Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:24.376786Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:24.401505Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:24.817016Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:24.845203Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:25.294011Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:25.3132 ... UG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:49.565549Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:49.971391Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:49.994110Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:50.429015Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:50.459168Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:50.932552Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:50.961268Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:51.369176Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:51.386178Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:51.767845Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:51.789154Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:52.192593Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:52.209001Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:52.616381Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:52.636806Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:53.046323Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:53.066240Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:53.458948Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:53.473277Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:53.878215Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:53.900693Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:54.306413Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:54.325093Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:54.719150Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:54.744145Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:55.224020Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:55.243238Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:55.639376Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Execute 2025-04-17T17:10:55.656714Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:56.089322Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:56.108655Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:56.525053Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:56.553132Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:56.958064Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:56.973280Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:57.389248Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:57.411347Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:57.844731Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:57.861130Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:58.261010Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:58.288649Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:58.681485Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:58.710125Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:59.156460Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:59.180701Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:59.616990Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-17T17:10:59.617291Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-17T17:10:59.617549Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-04-17T17:10:59.641339Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-17T17:10:59.659897Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:410:2409], cookie=6459951238828979180, name="Sem1") 2025-04-17T17:10:59.664490Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:410:2409], cookie=6459951238828979180) 2025-04-17T17:11:03.766191Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:11:03.766282Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:11:03.804924Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:11:03.811623Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:11:03.861115Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:11:03.862353Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=9848984943936085213, session=0, seqNo=0) 2025-04-17T17:11:03.863121Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:11:03.881742Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=9848984943936085213, session=1) 2025-04-17T17:11:03.882994Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=7219517928464678519, session=0, 
seqNo=0) 2025-04-17T17:11:03.883573Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:11:03.906298Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=7219517928464678519, session=2) 2025-04-17T17:11:03.907518Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=7789702531724235761, session=0, seqNo=0) 2025-04-17T17:11:03.908025Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 3 2025-04-17T17:11:03.932001Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=7789702531724235761, session=3) 2025-04-17T17:11:03.936406Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:145:2169], cookie=322493091121483437, name="Sem1", limit=3) 2025-04-17T17:11:03.937445Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-17T17:11:03.961341Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:145:2169], cookie=322493091121483437) 2025-04-17T17:11:03.962332Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=111, session=1, semaphore="Sem1" count=2) 2025-04-17T17:11:03.963192Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-17T17:11:03.963714Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=222, session=2, semaphore="Sem1" count=1) 2025-04-17T17:11:03.963770Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-04-17T17:11:03.963837Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=333, session=3, semaphore="Sem1" count=1) 2025-04-17T17:11:04.001603Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=111) 2025-04-17T17:11:04.001864Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=222) 2025-04-17T17:11:04.001889Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=333) 2025-04-17T17:11:04.004166Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:153:2177], cookie=3364226484522839092, name="Sem1") 2025-04-17T17:11:04.004636Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:153:2177], cookie=3364226484522839092) 2025-04-17T17:11:04.006855Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:156:2180], cookie=18173278766220224159, name="Sem1") 2025-04-17T17:11:04.007311Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:156:2180], cookie=18173278766220224159) 2025-04-17T17:11:04.008507Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=444, session=1, semaphore="Sem1" count=1) 2025-04-17T17:11:04.009203Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-04-17T17:11:04.028177Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=444) 2025-04-17T17:11:04.030528Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreDescribe::Execute (sender=[5:161:2185], cookie=2854766113172688344, name="Sem1") 2025-04-17T17:11:04.031068Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:161:2185], cookie=2854766113172688344) 2025-04-17T17:11:04.033390Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:164:2188], cookie=5316235101477597970, name="Sem1") 2025-04-17T17:11:04.033876Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:164:2188], cookie=5316235101477597970) 2025-04-17T17:11:04.068439Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:11:04.068916Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:11:04.073795Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:11:04.075707Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:11:04.146565Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:11:04.147342Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-17T17:11:04.147748Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-04-17T17:11:04.147770Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-04-17T17:11:04.148554Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:204:2218], cookie=7735668237472216842, name="Sem1") 2025-04-17T17:11:04.149017Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:204:2218], cookie=7735668237472216842) 2025-04-17T17:11:04.150318Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:213:2226], cookie=11460866840759488143, name="Sem1") 2025-04-17T17:11:04.150381Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:213:2226], cookie=11460866840759488143) >> TTransferTests::CreateWithoutCredentials [GOOD] >> TTransferTests::CreateWrongConfig |74.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] |74.4%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! 
new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:76:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:79:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:80:2057] recipient: [10:78:2110] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:82:2057] recipient: [10:78:2110] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:81:2111] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:135:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:76:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:79:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:80:2057] recipient: [11:78:2110] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:82:2057] recipient: [11:78:2110] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:81:2111] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:135:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:77:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:80:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:81:2057] recipient: [12:79:2110] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:83:2057] recipient: [12:79:2110] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:82:2111] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:136:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:83:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:84:2057] recipient: [13:82:2113] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:86:2057] recipient: [13:82:2113] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:85:2114] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:139:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:83:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:84:2057] recipient: [14:82:2113] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:86:2057] recipient: [14:82:2113] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! 
new actor is[14:85:2114] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:139:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:84:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:85:2057] recipient: [15:83:2113] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:87:2057] recipient: [15:83:2113] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:86:2114] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:140:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:84:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:87:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:88:2057] recipient: [16:86:2116] Leader for TabletID 72057594037927937 is [16:89:2117] sender: [16:90:2057] recipient: [16:86:2116] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:89:2117] Leader for TabletID 72057594037927937 is [16:89:2117] sender: [16:143:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:84:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:87:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:88:2057] recipient: [17:86:2116] Leader for TabletID 72057594037927937 is [17:89:2117] sender: [17:90:2057] recipient: [17:86:2116] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! 
new actor is[17:89:2117] Leader for TabletID 72057594037927937 is [17:89:2117] sender: [17:143:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:85:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:88:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:89:2057] recipient: [18:87:2116] Leader for TabletID 72057594037927937 is [18:90:2117] sender: [18:91:2057] recipient: [18:87:2116] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:90:2117] Leader for TabletID 72057594037927937 is [18:90:2117] sender: [18:144:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] |74.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::MultipleSessions [GOOD] Test command err: 2025-04-17T17:11:04.697046Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:11:04.697130Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:11:04.722681Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:11:04.722764Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-04-17T17:11:04.722853Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-04-17T17:11:04.722899Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 2 for session: session1 2025-04-17T17:11:04.722931Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-04-17T17:11:04.723010Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 2 for session session1, Generation: 1 2025-04-17T17:11:04.723066Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session2:1 with generation 2 2025-04-17T17:11:04.723113Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 3 for session: session2 2025-04-17T17:11:04.723187Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 3 for session session2, Generation: 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestDeregister [GOOD] Test command err: 2025-04-17T17:11:04.377537Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable 
metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:11:04.377661Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:11:04.408207Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:11:04.408284Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-04-17T17:11:04.408337Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session2:1 with generation 1 2025-04-17T17:11:04.408453Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: session1 >> DataShardWrite::UpsertImmediate >> TKesusTest::TestSessionTimeoutAfterReboot [GOOD] >> TKesusTest::TestSessionStealingSameKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] Test command err: 2025-04-17T17:10:41.574663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:10:41.575123Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:10:41.575178Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0021dc/r3tmp/tmp87s1rY/pdisk_1.dat 2025-04-17T17:10:42.036971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:10:42.089005Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:42.129066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:10:42.129188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:10:42.140466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:10:42.223764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:10:42.625955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:790:2660], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:42.626092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:800:2665], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:42.626168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:42.651172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:10:42.943193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:804:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:10:43.061027Z node 1 :TX_PROXY ERROR: Actor# [1:882:2715] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:11:01.851373Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2bpe607gvwwhkxhdpzbjjh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWE3ZjcyMzEtOTZhNWRiNWEtZTA3ZjE0OWQtMTE1NzI0YWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:11:02.010802Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2bpe607gvwwhkxhdpzbjjh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWE3ZjcyMzEtOTZhNWRiNWEtZTA3ZjE0OWQtMTE1NzI0YWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:11:02.497397Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2bpe607gvwwhkxhdpzbjjh", SessionId: ydb://session/3?node_id=1&id=YWE3ZjcyMzEtOTZhNWRiNWEtZTA3ZjE0OWQtMTE1NzI0YWQ=, Slow query, duration: 19.873029s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "UPSERT INTO `/Root/TestTable` (key, value) VALUES (0, 00), (1, 11), (2, 22), (3, 33), (4, 44), (5, 55), (6, 66), (7, 77), (8, 88), (9, 99), (10, 1010), (11, 1111), (12, 1212), (13, 1313), (14, 1414), (15, 1515), (16, 1616), (17, 1717), (18, 1818), (19, 1919), (20, 2020), (21, 2121), (22, 2222), (23, 2323), (24, 2424), (25, 2525), (26, 2626), (27, 2727), (28, 2828), (29, 2929), (30, 3030), (31, 3131), (32, 3232), (33, 3333), (34, 3434), (35, 3535), (36, 3636), (37, 3737), (38, 3838), (39, 3939), (40, 4040), (41, 4141), (42, 4242), (43, 4343), (44, 4444), (45, 4545), (46, 4646), (47, 4747), (48, 4848), (49, 4949), (50, 5050), (51, 5151), (52, 5252), (53, 5353), (54, 5454), (55, 5555), (56, 5656), (57, 5757), (58, 5858), (59, 5959), (60, 6060), (61, 6161), (62, 6262), (63, 6363), (64, 6464), (65, 6565), (66, 6666), (67, 6767), (68, 6868), (69, 6969), (70, 7070), (71, 7171), (72, 7272), (73, 7373), (74, 7474), (75, 7575), (76, 7676), (77, 7777), (78, 7878), (79, 7979), (80, 8080), (81, 8181), (82, 8282), (83, 8383), (84, 8484), (85, 8585), (86, 8686), (87, 8787), (88, 8888), (89, 8989), (90, 9090), (91, 9191), (92, 9292), (93, 9393), (94, 9494), (95, 9595), (96, 9696), (97, 9797), (98, 9898), (99, 9999), (100, 100100), (101, 101101), (102, 102102), (103, 103103), (104, 104104), (105, 105105), (106, 106106), (107, 107107), (108, 108108), (109, 109109), (110, 110110), (111, 111111), (112, 112112), (113, 113113), (114, 114114), (115, 115115), (116, 116116), (117, 117117), (118, 118118), (119, 119119), (120, 120120), (121, 121121), (122, 122122), (123, 123123), (124, 124124), (125, 125125), (126, 126126), (127, 127127), (128, 128128), (129, 129129), (130, 130130), (131, 131131), (132, 132132), (133, 133133), (134, 134134), (135, 135135), (136, 136136), (137, 137137), (138, 138138), (139, 139139), (140, 140140), (141, 141141), (142, 142142), (143, 143143), (144, 144144), (145, 145145), (146, 146146), (147, 147147), (148, 148148), (149, 149149), (150, 150150), (151, 151151), (152, 152152), (153, 153153), (154, 154154), (155, 155155), (156, 156156), (157, 157157), (158, 158158), (159, 159159), (160, 160160), (161, 161161), (162, 162162), (163, 163163), 
(164, 164164), (165, 165165), (166, 166166), (167, 167167), (168, 168168), (169, 169169), (170, 170170), (171, 171171), (172, 172172), (173, 173173), (174, 174174), (175, 175175), (176, 176176), (177, 177177), (178, 178178), (179, 179179), (180, 180180), (181, 181181), (182, 182182), (183, 183183), (184, 184184), (185, 185185), (186, 186186), (187, 187187), (188, 188188), (189, 189189), (190, 190190), (191, 191191), (192, 192192), (193, 193193), (194, 194194), (195, 195195), (196, 196196), (197, 197197), (198, 198198), (199, 199199), (200, 200200), (201, 201201), (202, 202202), (203, 203203), (204, 204204), (205, 205205), (206, 206206), (207, 207207), (208, 208208), (209, 209209), (210, 210210), (211, 211211), (212, 212212), (213, 213213), (214, 214214), (215, 215215), (216, 216216), (217, 217217), (218, 218218), (219, 219219), (220, 220220), (221, 221221), (222, 222222), (223, 223223), (224, 224224), (225, 225225), (226, 226226), (227, 227227), (228, 228228), (229, 229229), (230, 230230), (231, 231231), (232, 232232), (233, 233233), (234, 234234), (235, 235235), (236, 236236), (237, 237237), (238, 238238), (239, 239239), (240, 240240), (241, 241241), (242, 242242), (243, 243243), (244, 244244), (245, 245245), (246, 246246), (247, 247247), (248, 248248), (249, 249249), (250, 250250), (251, 251251), (252, 252252), (253, 253253), (254, 254254), (255, 255255), (256, 256256), (257, 257257), (258, 258258), (259, 259259), (260, 260260), (261, 261261), (262, 262262), (263, 263263), (264, 264264), (265, 265265), (266, 266266), (267, 267267), (268, 268268), (269, 269269), (270, 270270), (271, 271271), (272, 272272), (273, 273273), (274, 274274), (275, 275275), (276, 276276), (277, 277277), (278, 278278), (279, 279279), (280, 280280), (281, 281281), (282, 282282), (283, 283283), (284, 284284), (285, 285285), (286, 286286), (287, 287287), (288, 288288), (289, 289289), (290, 290290), (291, 291291), (292, 292292), (293, 293293), (294, 294294), (295, 295295), (296, 296296), (297, 297297), (298, 298298), (299, 299299), (300, 300300), (301, 301301), (302, 302302), (303, 303303), (304, 304304), (305, 305305), (306, 306306), (307, 307307), (308, 308308), (309, 309309), (310, 310310), (311, 311311), (312, 312312), (313, 313313), (314, 314314), (315, 315315), (316, 316316), (317, 317317), (318, 318318), (319, 319319), (320, 320320), (321, 321321), (322, 322322), (323, 323323), (324, 324324), (325, 325325), (326, 326326), (327, 327327), (328, 328328), (329, 329329), (330, 330330), (331, 331331), (332, 332332), (333, 333333), (334, 334334), (335, 335335), (336, 336336), (337, 337337), (338, 338338), (339, 339339), (340, 340340), (341, 341341), (342, 342342), (343, 343343), (344, 344344), (345, 345345), (346, 346346), (347, 347347), (348, 348348), (349, 349349), (350, 350350), (351, 351351), (352, 352352), (353, 353353), (354, 354354), (355, 355355), (356, 356356), (357, 357357), (358, 358358), (359, 359359), (360, 360360), (361, 361361), (362, 362362), (363, 363363), (364, 364364), (365, 365365), (366, 366366), (367, 367367), (368, 368368), (369, 369369), (370, 370370), (371, 371371), (372, 372372), (373, 373373), (374, 374374), (375, 375375), (376, 376376), (377, 377377), (378, 378378), (379, 379379), (380, 380380), (381, 381381), (382, 382382), (383, 383383), (384, 384384), (385, 385385), (386, 386386), (387, 387387), (388, 388388), (389, 389389), (390, 390390), (391, 391391), (392, 392392), (393, 393393), (394, 394394), (395, 395395), (396, 396396), (397, 397397), (398, 398398), (399, 399399), (400, 400400), 
(401, 401401), (402, 402402), (403, 403403), (404, 404404), (405, 405405), (406, 406406), (407, 407407), (408, 408408), (409, 409409), (410, 410410), (411, 411411), (412, 412412), (413, 413413), (414, 414414), (415, 415415), (416, 416416), (417, 417417), (418, 418418), (419, 419419), (420, 420420), (421, 421421), (422, 422422), (423, 423423), (424, 424424), (425, 425425), (426, 426426), (427, 427427), (428, 428428), (429, 429429), (430, 430430), (431, 431431), (432, 432432), (433, 433433), (434, 434434), (435, 435435), (436, 436436), (437, 437437), (438, 438438), (439, 439439), (440, 440440), (441, 441441), (442, 442442), (443, 443443), (444, 444444), (445, 445445), (446, 446446), (447, 447447), (448, 448448), (449, 449449), (450, 450450), (451, 451451), (452, 452452), (453, 453453), (454, 454454), (455, 455455), (456, 456456), (457, 457457), (458, 458458), (459, 459459), (460, 460460), (461, 461461), (462, 462462), (463, 463463), (464, 464464), (465, 465465), (466, 466466), (467, 467467), (468, 468468), (469, 469469), (470, 470470), (471, 471471), (472, 472472), (473, 473473), (474, 474474), (475, 475475), (476, 476476), (477, 477477), (478, 478478), (479, 479479), (480, 480480), (481, 481481), (482, 482482), (483, 483483), (484, 484484), (485, 485485), (486, 486486), (487, 487487), (488, 488488), (489, 489489), (490, 490490), (491, 491491), (492, 492492), (493, 493493), (494, 494494), (495, 495495), (496, 496496), (497, 497497), (498, 498498), (499, 499499), (500, 500500), (501, 501501), (502, 502502), (503, 503503), (504, 504504), (505, 505505), (506, 506506), (507, 507507), (508, 508508), (509, 509509), (510, 510510), (511, 511511), (512, 512512), (513, 513513), (514, 514514), (515, 515515), (516, 516516), (517, 517517), (518, 518518), (519, 519519), (520, 520520), (521, 521521), (522, 522522), (523, 523523), (524, 524524), (525, 525525), (526, 526526), (527, 527527), (528, 528528), (529, 529529), (530, 530530), (531, 531531), (532, 532532), (533, 533533), (534, 534534), (535, 535535), (536, 536536), (537, 537537), (538, 538538), (539, 539539), (540, 540540), (541, 541541), (542, 542542), (543, 543543), (544, 544544), (545, 545545), (546, 546546), (547, 547547), (548, 548548), (549, 549549), (550, 550550), (551, 551551), (552, 552552), (553, 553553), (554, 554554), (555, 555555), (556, 556556), (557, 557557), (558, 558558), (559, 559559), (560, 560560), (561, 561561), (562, 562562), (563, 563563), (564, 564564), (565, 565565), (566, 566566), (567, 567567), (568, 568568), (569, 569569), (570, 570570), (571, 571571), (572, 572572), (573, 573573), (574, 574574), (575, 575575), (576, 576576), (577, 577577), (578, 578578), (579, 579579), (580, 580580), (581, 581581), (582, 582582), (583, 583583), (584, 584584), (585, 585585), (586, 586586), (587, 587587), (588, 588588), (589, 589589), (590, 590590), (591, 591591), (592, 592592), (593, 593593), (594, 594594), (595, 595595), (596, 596596), (597, 597597), (598, 598598), (599, 599599), (600, 600600), (601, 601601), (602, 602602), (603, 603603), (604, 604604), (605, 605605), (606, 606606), (607, 607607), (608, 608608), (609, 609609), (610, 610610), (611, 611611), (612, 612612), (613, 613613), (614, 614614), (615, 615615), (616, 616616), (617, 617617), (618, 618618), (619, 619619), (620, 620620), (621, 621621), (622, 622622), (623, 623623), (624, 624624), (625, 625625), (626, 626626), (627, 627627), (628, 628628), (629, 629629), (630, 630630), (631, 631631), (632, 632632), (633, 633633), (634, 634634), (635, 635635), (636, 636636), (637, 637637), 
(638, 638638), (639, 639639), (640, 640640), (641, 641641), (642, 642642), (643, 643643), (644, 644644), (645, 645645), (646, 646646), (647, 647647), (648, 648648), (649, 649649), (650, 650650), (651, 651651), (652, 652652), (653, 653653), (654, 654654), (655, 655655), (656, 656656), (657, 657657), (658, 658658), (659, 659659), (660, 660660), (661, 661661), (662, 662662), (663, 663663), (664, 664664), (665, 665665), (666, 666666), (667, 667667), (668, 668668), (669, 669669), (670, 670670), (671, 671671), (672, 672672), (673, 673673), (674, 674674), (675, 675675), (676, 676676), (677, 677677), (678, 678678), (679, 679679), (680, 680680), (681, 681681), (682, 682682), (683, 683683), (684, 684684), (685, 685685), (686, 686686), (687, 687687), (688, 688688), (689, 689689), (690, 690690), (691, 691691), (692, 692692), (693, 693693), (694, 694694), (695, 695695), (696, 696696), (697, 697697), (698, 698698), (699, 699699), (700, 700700), (701, 701701), (702, 702702), (703, 703703), (704, 704704), (705, 705705), (706, 706706), (707, 707707), (708, 708708), (709, 709709), (710, 710710), (711, 711711), (712, 712712), (713, 713713), (714, 714714), (715, 715715), (716, 716716), (717, 717717), (718, 718718), (719, 719719), (720, 720720), (721, 721721), (722, 722722), (723, 723723), (724, 724724), (725, 725725), (726, 726726), (727, 727727), (728, 728728), (729, 729729), (730, 730730), (731, 731731), (732, 732732), (733, 733733), (734, 734734), (735, 735735), (736, 736736), (737, 737737), (738, 738738), (739, 739739), (740, 740740), (741, 741741), (742, 742742), (743, 743743), (744, 744744), (745, 745745), (746, 746746), (747, 747747), (748, 748748), (749, 749749), (750, 750750), (751, 751751), (752, 752752), (753, 753753), (754, 754754), (755, 755755), (756, 756756), (757, 757757), (758, 758758), (759, 759759), (760, 760760), (761, 761761), (762, 762762), (763, 763763), (764, 764764), (765, 765765), (766, 766766), (767, 767767), (768, 768768), (769, 769769), (770, 770770), (771, 771771), (772, 772772), (773, 773773), (774, 774774), (775, 775775), (776, 776776), (777, 777777), (778, 778778), (779, 779779), (780, 780780), (781, 781781), (782, 782782), (783, 783783), (784, 784784), (785, 785785), (786, 786786), (787, 787787), (788, 788788), (789, 789789), (790, 790790), (791, 791791), (792, 792792), (793, 793793), (794, 794794), (795, 795795), (796, 796796), (797, 797797), (798, 798798), (799, 799799), (800, 800800), (801, 801801), (802, 802802), (803, 803803), (804, 804804), (805, 805805), (806, 806806), (807, 807807), (808, 808808), (809, 809809), (810, 810810), (811, 811811), (812, 812812), (813, 813813), (814, 814814), (815, 815815), (816, 816816), (817, 817817), (818, 818818), (819, 819819), (820, 820820), (821, 821821), (822, 822822), (823, 823823), (824, 824824), (825, 825825), (826, 826826), (827, 827827), (828, 828828), (829, 829829), (830, 830830), (831, 831831), (832, 832832), (833, 833833), (834, 834834), (835, 835835), (836, 836836), (837, 837837), (838, 838838), (839, 839839), (840, 840840), (841, 841841), (842, 842842), (843, 843843), (844, 844844), (845, 845845), (846, 846846), (847, 847847), (848, 848848), (849, 849849), (850, 850850), (851, 851851), (852, 852852), (853, 853853), (854, 854854), (855, 855855), (856, 856856), (857, 857857), (858, 858858), (859, 859859), (860, 860860), (861, 861861), (862, 862862), (863, 863863), (864, 864864), (865, 865865), (866, 866866), (867, 867867), (868, 868868), (869, 869869), (870, 870870), (871, 871871), (872, 872872), (873, 873873), (874, 874874), 
(875, 875875), (876, 876876), (877, 877877), (878, 878878), (879, 879879), (880, 880880), (881, 881881), (882, 882882), (883, 883883), (884, 884884), (885, 885885), (886, 886886), (887, 887887), (888, 888888), (889, 889889), (890, 890890), (891, 891891), (892, 892892), (893, 893893), (894, 894894), (895, 895895), (896, 896896), (897, 897897), (898, 898898), (899, 899899), (900, 900900), (901, 901901), (902, 902902), (903, 903903), (904, 904904), (905, 905905), (906, 906906), (907, 907907), (908, 908908), (909, 909909), (910, 910910), (911, 911911), (912, 912912), (913, 913913), (914, 914914), (915, 915915), (916, 916916), (917, 917917), (918, 918918), (919, 919919), (920, 920920), (921, 921921), (922, 922922), (923, 923923), (924, 924924), (925, 925925), (926, 926926), (927, 927927), (928, 928928), (929, 929929), (930, 930930), (931, 931931), (932, 932932), (933, 933933), (934, 934934), (935, 935935), (936, 936936), (937, 937937), (938, 938938), (939, 939939), (940, 940940), (941, 941941), (942, 942942), (943, 943943), (944, 944944), (945, 945945), (946, 946946), (947, 947947), (948, 948948), (949, 949949), (950, 950950), (951, 951951), (952, 952952), (953, 953953), (954, 954954), (955, 955955), (956, 956956), (957, 957957), (958, 958958), (959, 959959), (960, 960960), (961, 961961), (962, 962962), (963, 963963), (964, 964964), (965, 965965), (966, 966966), (967, 967967), (968, 968968), (969, 969969), (970, 970970), (971, 971971), (972, 972972), (973, 973973), (974, 974974), (975, 975975), (976, 976976), (977, 977977), (978, 978978), (979, 979979), (980, 980980), (981, 981981), (982, 982982), (983, 983983), (984, 984984), (985, 985985), (986, 986986), (987, 987987), (988, 988988), (989, 989989), (990, 990990), (991, 991991), (992, 992992), (993, 993993), (994, 994994), (995, 995995), (996, 996996), (997, 997997), (998, 998998), (999, 999999), (10000, 10000);", parameters: 0b 2025-04-17T17:11:03.147135Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2bq1kd3rpb5zwybrx4xxkq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTQ4OWU3NDAtYTdjNzFiMjgtODIxYTkwZTgtYjZhNGZiMjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite >> TTransferTests::CreateSequential [GOOD] >> TTransferTests::CreateInParallel >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile >> GenericFederatedQuery::YdbFilterPushdown [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
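[Editor's note: the 19.873029s slow-query record earlier in this block upserts keys 0..999 whose values repeat the key's decimal digits twice (0 -> 00, 163 -> 163163), plus one trailing (10000, 10000) row. A minimal sketch of a generator for that statement follows; the helper is hypothetical and is not the actual ydb/core/tx/datashard/ut_kqp test source:

    #include <iostream>
    #include <string>

    // Hypothetical generator reproducing the data pattern visible in the
    // slow-query log: value = the key's digits written twice, plus one extra row.
    int main() {
        std::string sql = "UPSERT INTO `/Root/TestTable` (key, value) VALUES ";
        for (int k = 0; k <= 999; ++k) {
            const std::string d = std::to_string(k);
            sql += "(" + d + ", " + d + d + "), ";   // e.g. (163, 163163)
        }
        sql += "(10000, 10000);";                    // final row breaks the pattern
        std::cout << sql << "\n";
        return 0;
    }

The whole batch is a single statement, which is why the slow-query log reports one 19.9s UPSERT rather than many small ones.]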
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:76:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:79:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:78:2110] Leader for TabletID 72057594037927937 is [13:81:2111] sender: [13:82:2057] recipient: [13:78:2110] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! 
new actor is[13:81:2111] Leader for TabletID 72057594037927937 is [13:81:2111] sender: [13:135:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:76:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:79:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:78:2110] Leader for TabletID 72057594037927937 is [14:81:2111] sender: [14:82:2057] recipient: [14:78:2110] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! new actor is[14:81:2111] Leader for TabletID 72057594037927937 is [14:81:2111] sender: [14:135:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:77:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:80:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:79:2110] Leader for TabletID 72057594037927937 is [15:82:2111] sender: [15:83:2057] recipient: [15:79:2110] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:82:2111] Leader for TabletID 72057594037927937 is [15:82:2111] sender: [15:136:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:83:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:84:2057] recipient: [16:82:2113] Leader for TabletID 72057594037927937 is [16:85:2114] sender: [16:86:2057] recipient: [16:82:2113] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! 
new actor is[16:85:2114] Leader for TabletID 72057594037927937 is [16:85:2114] sender: [16:139:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:83:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:84:2057] recipient: [17:82:2113] Leader for TabletID 72057594037927937 is [17:85:2114] sender: [17:86:2057] recipient: [17:82:2113] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:85:2114] Leader for TabletID 72057594037927937 is [17:85:2114] sender: [17:139:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:84:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:85:2057] recipient: [18:83:2113] Leader for TabletID 72057594037927937 is [18:86:2114] sender: [18:87:2057] recipient: [18:83:2113] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:86:2114] Leader for TabletID 72057594037927937 is [18:86:2114] sender: [18:104:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:86:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:87:2057] recipient: [19:85:2115] Leader for TabletID 72057594037927937 is [19:88:2116] sender: [19:89:2057] recipient: [19:85:2115] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! 
new actor is[19:88:2116] Leader for TabletID 72057594037927937 is [19:88:2116] sender: [19:142:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:86:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:87:2057] recipient: [20:85:2115] Leader for TabletID 72057594037927937 is [20:88:2116] sender: [20:89:2057] recipient: [20:85:2115] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! new actor is[20:88:2116] Leader for TabletID 72057594037927937 is [20:88:2116] sender: [20:142:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:87:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:88:2057] recipient: [21:86:2115] Leader for TabletID 72057594037927937 is [21:89:2116] sender: [21:90:2057] recipient: [21:86:2115] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! 
new actor is[21:89:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] >> ColumnBuildTest::BaseCase [GOOD] >> DataShardWrite::WriteImmediateBadRequest >> TKesusTest::TestSessionStealingSameKey [GOOD] >> TKesusTest::TestSessionStealingDifferentKey >> TKesusTest::TestAcquireTimeoutAfterReboot [GOOD] >> TKesusTest::TestAcquireSemaphoreViaRelease >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] >> TKeyValueTest::TestRenameWorksNewApi [GOOD] >> KqpStreamLookup::ReadTableDuringSplit [GOOD] >> TTransferTests::CreateWrongConfig [GOOD] >> TTransferTests::CreateWrongBatchSize ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:10:53.192345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:10:53.192471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:10:53.192644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:10:53.192756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:10:53.192878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:10:53.193108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:10:53.193442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:10:53.193732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:10:53.195073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:10:53.730144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:10:53.730191Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:53.810338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:10:53.812704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:10:53.814890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:10:53.876741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
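[Editor's note: every keyvalue reboot trace above follows the same cycle: the harness injects "!Reboot ... on event <type> !", a new leader actor is created for TabletID 72057594037927937, and the tablet resolver's cached leader entry is refreshed before the test proceeds ("new actor is[...]"). A schematic model of that cycle, with invented types standing in for the real harness; this is not YDB's test API, and the field names in the bracketed actor ids are guesses:

    #include <cstdint>
    #include <iostream>
    #include <string>
    #include <unordered_map>

    // Illustrative model only: an actor id with three numeric parts, printed
    // like the trace's [17:89:2117], and a resolver whose cached leader entry
    // must be refreshed after each injected reboot.
    struct ActorId {
        uint32_t Node = 0, Local = 0, Cookie = 0;
        std::string Str() const {
            return "[" + std::to_string(Node) + ":" + std::to_string(Local) +
                   ":" + std::to_string(Cookie) + "]";
        }
    };

    class TabletResolver {
        std::unordered_map<uint64_t, ActorId> Leaders;  // tabletId -> cached leader
    public:
        void Refresh(uint64_t tablet, const ActorId& leader) { Leaders[tablet] = leader; }
        ActorId Lookup(uint64_t tablet) const { return Leaders.at(tablet); }
    };

    int main() {
        const uint64_t kTablet = 72057594037927937ULL;
        TabletResolver resolver;
        resolver.Refresh(kTablet, ActorId{17, 56, 2097});

        // Inject a reboot on a chosen event, as the "!Reboot ... on event ... !" lines do.
        std::cout << "!Reboot " << kTablet
                  << " on event NKikimr::TEvKeyValue::TEvIntermediate !\n";

        // After re-election a new leader exists; a stale resolver entry would
        // route to the dead actor, hence the explicit refresh step in the trace.
        resolver.Refresh(kTablet, ActorId{17, 89, 2117});
        std::cout << "tablet resolver refreshed! new actor is"
                  << resolver.Lookup(kTablet).Str() << "\n";
        return 0;
    }

]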
2025-04-17T17:10:53.877910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:10:53.891927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:10:53.892474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:10:53.910680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:10:53.917614Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:10:53.917879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:10:53.919366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:10:53.919748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:10:53.919957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:10:53.923195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:10:53.976083Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:10:54.385268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:10:54.385511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:54.385679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:10:54.385865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:10:54.385917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:54.388390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:10:54.388518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:10:54.388708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:54.388760Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:10:54.388799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:10:54.388832Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:10:54.390515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:54.390574Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:10:54.390605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:10:54.392158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:54.392205Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:54.392243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:10:54.392289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:10:54.395704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:10:54.397569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:10:54.397760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:10:54.398629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:10:54.398780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:10:54.398831Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:10:54.399123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:10:54.399174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:10:54.399339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:10:54.399434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:10:54.401270Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:10:54.401310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
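[Editor's note: the ColumnBuildTest records above show schemeshard's operation lifecycle for txid 1: TCreateParts, then TConfigureParts, then TPropose (planned through the coordinator at step 5000001), with the tx state advancing 2 -> 3 -> 128 -> 240; TDone and "RemoveTx" follow just below. A sketch that mirrors only the logged transitions; the enum is illustrative, not schemeshard's actual TTxState type:

    #include <cstddef>
    #include <iostream>
    #include <vector>

    // Numeric values and stage names are taken from the log lines above;
    // the enum itself is an assumption, not the real schemeshard definition.
    enum class TxState : int {
        CreateParts    = 2,    // "TCreateParts opId# 1:0 ProgressState", then 2 -> 3
        ConfigureParts = 3,    // "NSubDomainState::TConfigureParts", then 3 -> 128
        Propose        = 128,  // waits for TEvOperationPlan, then 128 -> 240
        Done           = 240,  // "TDone": progress 1/1, then "RemoveTx for txid"
    };

    int main() {
        const std::vector<TxState> stages = {
            TxState::CreateParts, TxState::ConfigureParts,
            TxState::Propose, TxState::Done,
        };
        for (std::size_t i = 0; i + 1 < stages.size(); ++i) {
            std::cout << "Change state for txid 1:0 "
                      << static_cast<int>(stages[i]) << " -> "
                      << static_cast<int>(stages[i + 1]) << "\n";
        }
        return 0;
    }

]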
2025-04-17T17:10:54.401504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:10:54.401547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:10:54.401903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:54.401947Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:10:54.402173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:10:54.402262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:10:54.402472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:10:54.402580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:10:54.402653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:10:54.402753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:10:54.402972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:10:54.402998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:10:54.403215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:10:54.403300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:10:54.403427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:10:54.405945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:10:54.406030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:10:54.406059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }}, record: Status: StatusAccepted TxId: 281474976725761 SchemeshardId: 72075186233409549 PathId: 2 2025-04-17T17:11:06.189061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-04-17T17:11:06.189436Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 ProgressState 2025-04-17T17:11:06.190052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976725761 ready parts: 1/1 2025-04-17T17:11:06.190127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409550 message:Transaction { AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976725761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2025-04-17T17:11:06.203767Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-17T17:11:06.209847Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-17T17:11:06.210270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976725761:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 cookie: 0:281474976725761 msg type: 269090816 2025-04-17T17:11:06.210345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976725761, partId: 4294967295, tablet: 72075186233409550 2025-04-17T17:11:06.210669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976725761, at schemeshard: 72075186233409549 2025-04-17T17:11:06.210694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, 
ready parts: 0/1, is published: true 2025-04-17T17:11:06.210902Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976725761, at schemeshard: 72075186233409549 2025-04-17T17:11:06.229723Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:1814:3677], Recipient [1:752:2641]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409549 ClientId: [1:1814:3677] ServerId: [1:1816:3679] } 2025-04-17T17:11:06.229978Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-17T17:11:06.322272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 650, transactions count in step: 1, at schemeshard: 72075186233409549 2025-04-17T17:11:06.322369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725761 AckTo { RawX1: 0 RawX2: 0 } } Step: 650 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2025-04-17T17:11:06.322623Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 HandleReply TEvOperationPlan: step# 650 2025-04-17T17:11:06.322919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976725761:0 128 -> 240 2025-04-17T17:11:06.346141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-04-17T17:11:06.346381Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDone opId# 281474976725761:0 ProgressState 2025-04-17T17:11:06.346634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-04-17T17:11:06.346833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-17T17:11:06.346862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-04-17T17:11:06.346879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-17T17:11:06.346903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2025-04-17T17:11:06.346951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:563:2502] message: TxId: 281474976725761 2025-04-17T17:11:06.346978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-17T17:11:06.347166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976725761:0 2025-04-17T17:11:06.347189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976725761:0 2025-04-17T17:11:06.347238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-04-17T17:11:06.365160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2025-04-17T17:11:06.365228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976725761 2025-04-17T17:11:06.365508Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfoId: 106 2025-04-17T17:11:06.365785Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: 
, DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-17T17:11:06.374900Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-17T17:11:06.375186Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-17T17:11:06.376671Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-17T17:11:06.381535Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-17T17:11:06.381978Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-17T17:11:06.382415Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo 
SendNotifications: : id# 106, subscribers count# 1 2025-04-17T17:11:06.382918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-17T17:11:06.383178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1161:3032] TestWaitNotification: OK eventTxId 106 2025-04-17T17:11:06.412422Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2025-04-17T17:11:06.413724Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 } >> TKeyValueTest::TestWriteDeleteThenReadRemaining [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! 
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! 
new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:76:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:79:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:80:2057] recipient: [10:78:2110] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:82:2057] recipient: [10:78:2110] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:81:2111] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:135:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:76:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:79:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:80:2057] recipient: [11:78:2110] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:82:2057] recipient: [11:78:2110] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:81:2111] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:135:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:77:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:80:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:81:2057] recipient: [12:79:2110] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:83:2057] recipient: [12:79:2110] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:82:2111] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:136:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:83:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:84:2057] recipient: [13:82:2113] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:86:2057] recipient: [13:82:2113] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:85:2114] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:139:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:83:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:84:2057] recipient: [14:82:2113] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:86:2057] recipient: [14:82:2113] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! 
new actor is[14:85:2114] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:139:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:84:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:85:2057] recipient: [15:83:2113] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:87:2057] recipient: [15:83:2113] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:86:2114] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:140:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:84:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:87:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:88:2057] recipient: [16:86:2116] Leader for TabletID 72057594037927937 is [16:89:2117] sender: [16:90:2057] recipient: [16:86:2116] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:89:2117] Leader for TabletID 72057594037927937 is [16:89:2117] sender: [16:143:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:84:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:86:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:88:2057] recipient: [17:87:2116] Leader for TabletID 72057594037927937 is [17:89:2117] sender: [17:90:2057] recipient: [17:87:2116] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! 
new actor is[17:89:2117] Leader for TabletID 72057594037927937 is [17:89:2117] sender: [17:143:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:85:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:88:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:89:2057] recipient: [18:87:2116] Leader for TabletID 72057594037927937 is [18:90:2117] sender: [18:91:2057] recipient: [18:87:2116] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:90:2117] Leader for TabletID 72057594037927937 is [18:90:2117] sender: [18:144:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:88:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:91:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:92:2057] recipient: [19:90:2119] Leader for TabletID 72057594037927937 is [19:93:2120] sender: [19:94:2057] recipient: [19:90:2119] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! new actor is[19:93:2120] Leader for TabletID 72057594037927937 is [19:93:2120] sender: [19:147:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:88:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:91:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:92:2057] recipient: [20:90:2119] Leader for TabletID 72057594037927937 is [20:93:2120] sender: [20:94:2057] recipient: [20:90:2119] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! 
new actor is[20:93:2120] Leader for TabletID 72057594037927937 is [20:93:2120] sender: [20:147:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:89:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:92:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:93:2057] recipient: [21:91:2119] Leader for TabletID 72057594037927937 is [21:94:2120] sender: [21:95:2057] recipient: [21:91:2119] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:94:2120] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] >> TInterconnectTest::TestCrossConnect [GOOD] >> TInterconnectTest::TestManyEventsWithReconnect >> TKesusTest::TestSessionStealingDifferentKey [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] >> GenericFederatedQuery::YdbManagedSelectAll [GOOD] >> GenericFederatedQuery::YdbManagedSelectConstant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableDuringSplit [GOOD] Test command err: 2025-04-17T17:10:47.253110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:10:47.253517Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:10:47.253567Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0021d8/r3tmp/tmpofQagt/pdisk_1.dat 2025-04-17T17:10:47.599960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:10:47.636266Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:47.674907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:10:47.675028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:10:47.688334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:10:47.770612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:10:48.156691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:739:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:48.156895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2626], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:48.156988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:48.169760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:10:48.363399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:753:2629], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:10:48.439450Z node 1 :TX_PROXY ERROR: Actor# [1:827:2672] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:11:05.525899Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2bpkjs5pkrx0e98pevjjs1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmEzNmM4NWItY2FjM2IzYTItM2QwNGQxZjctMzM3NDQzZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:11:05.640472Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2bpkjs5pkrx0e98pevjjs1", SessionId: ydb://session/3?node_id=1&id=YmEzNmM4NWItY2FjM2IzYTItM2QwNGQxZjctMzM3NDQzZDQ=, Slow query, duration: 17.486754s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "UPSERT INTO `/Root/TestTable` (key, value) VALUES (0, 00), (1, 11), (2, 22), (3, 33), (4, 44), (5, 55), (6, 66), (7, 77), (8, 88), (9, 99), (10, 1010), (11, 1111), (12, 1212), (13, 1313), (14, 1414), (15, 1515), (16, 1616), (17, 1717), (18, 1818), (19, 1919), (20, 2020), (21, 2121), (22, 2222), (23, 2323), (24, 2424), (25, 2525), (26, 2626), (27, 2727), (28, 2828), (29, 2929), (30, 3030), (31, 3131), (32, 3232), (33, 3333), (34, 3434), (35, 3535), (36, 3636), (37, 3737), (38, 3838), (39, 3939), (40, 4040), (41, 4141), (42, 4242), (43, 4343), (44, 4444), (45, 4545), (46, 4646), (47, 4747), (48, 4848), (49, 4949), (50, 5050), (51, 5151), (52, 5252), (53, 5353), (54, 5454), (55, 5555), (56, 5656), (57, 5757), (58, 5858), (59, 5959), (60, 6060), (61, 6161), (62, 6262), (63, 6363), (64, 6464), (65, 6565), (66, 6666), (67, 6767), (68, 6868), (69, 6969), (70, 7070), (71, 7171), (72, 7272), (73, 7373), (74, 7474), (75, 7575), (76, 7676), (77, 7777), (78, 7878), (79, 7979), (80, 8080), (81, 8181), (82, 8282), (83, 8383), (84, 8484), (85, 8585), (86, 8686), (87, 8787), (88, 8888), (89, 8989), (90, 9090), (91, 9191), (92, 9292), (93, 9393), (94, 9494), (95, 9595), (96, 9696), (97, 9797), (98, 9898), (99, 9999), (100, 100100), (101, 101101), (102, 102102), (103, 103103), (104, 104104), (105, 105105), (106, 106106), (107, 107107), (108, 108108), (109, 109109), (110, 110110), (111, 111111), (112, 112112), (113, 113113), (114, 114114), (115, 115115), (116, 116116), (117, 117117), (118, 118118), (119, 119119), (120, 120120), (121, 121121), (122, 122122), (123, 123123), (124, 124124), (125, 125125), (126, 126126), (127, 127127), (128, 128128), (129, 129129), (130, 130130), (131, 131131), (132, 132132), (133, 133133), (134, 134134), (135, 135135), (136, 136136), (137, 137137), (138, 138138), (139, 139139), (140, 140140), (141, 141141), (142, 142142), (143, 143143), (144, 144144), (145, 145145), (146, 146146), (147, 147147), (148, 148148), (149, 149149), (150, 150150), (151, 151151), (152, 152152), (153, 153153), (154, 154154), (155, 155155), (156, 156156), (157, 157157), (158, 158158), (159, 159159), (160, 160160), (161, 161161), (162, 162162), (163, 163163), (164, 164164), (165, 165165), (166, 166166), (167, 167167), (168, 168168), (169, 169169), (170, 170170), (171, 171171), (172, 172172), (173, 173173), (174, 174174), (175, 175175), (176, 176176), (177, 177177), (178, 178178), (179, 179179), (180, 180180), (181, 181181), (182, 182182), (183, 183183), (184, 184184), (185, 185185), 
(186, 186186), (187, 187187), (188, 188188), (189, 189189), (190, 190190), (191, 191191), (192, 192192), (193, 193193), (194, 194194), (195, 195195), (196, 196196), (197, 197197), (198, 198198), (199, 199199), (200, 200200), (201, 201201), (202, 202202), (203, 203203), (204, 204204), (205, 205205), (206, 206206), (207, 207207), (208, 208208), (209, 209209), (210, 210210), (211, 211211), (212, 212212), (213, 213213), (214, 214214), (215, 215215), (216, 216216), (217, 217217), (218, 218218), (219, 219219), (220, 220220), (221, 221221), (222, 222222), (223, 223223), (224, 224224), (225, 225225), (226, 226226), (227, 227227), (228, 228228), (229, 229229), (230, 230230), (231, 231231), (232, 232232), (233, 233233), (234, 234234), (235, 235235), (236, 236236), (237, 237237), (238, 238238), (239, 239239), (240, 240240), (241, 241241), (242, 242242), (243, 243243), (244, 244244), (245, 245245), (246, 246246), (247, 247247), (248, 248248), (249, 249249), (250, 250250), (251, 251251), (252, 252252), (253, 253253), (254, 254254), (255, 255255), (256, 256256), (257, 257257), (258, 258258), (259, 259259), (260, 260260), (261, 261261), (262, 262262), (263, 263263), (264, 264264), (265, 265265), (266, 266266), (267, 267267), (268, 268268), (269, 269269), (270, 270270), (271, 271271), (272, 272272), (273, 273273), (274, 274274), (275, 275275), (276, 276276), (277, 277277), (278, 278278), (279, 279279), (280, 280280), (281, 281281), (282, 282282), (283, 283283), (284, 284284), (285, 285285), (286, 286286), (287, 287287), (288, 288288), (289, 289289), (290, 290290), (291, 291291), (292, 292292), (293, 293293), (294, 294294), (295, 295295), (296, 296296), (297, 297297), (298, 298298), (299, 299299), (300, 300300), (301, 301301), (302, 302302), (303, 303303), (304, 304304), (305, 305305), (306, 306306), (307, 307307), (308, 308308), (309, 309309), (310, 310310), (311, 311311), (312, 312312), (313, 313313), (314, 314314), (315, 315315), (316, 316316), (317, 317317), (318, 318318), (319, 319319), (320, 320320), (321, 321321), (322, 322322), (323, 323323), (324, 324324), (325, 325325), (326, 326326), (327, 327327), (328, 328328), (329, 329329), (330, 330330), (331, 331331), (332, 332332), (333, 333333), (334, 334334), (335, 335335), (336, 336336), (337, 337337), (338, 338338), (339, 339339), (340, 340340), (341, 341341), (342, 342342), (343, 343343), (344, 344344), (345, 345345), (346, 346346), (347, 347347), (348, 348348), (349, 349349), (350, 350350), (351, 351351), (352, 352352), (353, 353353), (354, 354354), (355, 355355), (356, 356356), (357, 357357), (358, 358358), (359, 359359), (360, 360360), (361, 361361), (362, 362362), (363, 363363), (364, 364364), (365, 365365), (366, 366366), (367, 367367), (368, 368368), (369, 369369), (370, 370370), (371, 371371), (372, 372372), (373, 373373), (374, 374374), (375, 375375), (376, 376376), (377, 377377), (378, 378378), (379, 379379), (380, 380380), (381, 381381), (382, 382382), (383, 383383), (384, 384384), (385, 385385), (386, 386386), (387, 387387), (388, 388388), (389, 389389), (390, 390390), (391, 391391), (392, 392392), (393, 393393), (394, 394394), (395, 395395), (396, 396396), (397, 397397), (398, 398398), (399, 399399), (400, 400400), (401, 401401), (402, 402402), (403, 403403), (404, 404404), (405, 405405), (406, 406406), (407, 407407), (408, 408408), (409, 409409), (410, 410410), (411, 411411), (412, 412412), (413, 413413), (414, 414414), (415, 415415), (416, 416416), (417, 417417), (418, 418418), (419, 419419), (420, 420420), (421, 421421), (422, 422422), 
(423, 423423), (424, 424424), (425, 425425), (426, 426426), (427, 427427), (428, 428428), (429, 429429), (430, 430430), (431, 431431), (432, 432432), (433, 433433), (434, 434434), (435, 435435), (436, 436436), (437, 437437), (438, 438438), (439, 439439), (440, 440440), (441, 441441), (442, 442442), (443, 443443), (444, 444444), (445, 445445), (446, 446446), (447, 447447), (448, 448448), (449, 449449), (450, 450450), (451, 451451), (452, 452452), (453, 453453), (454, 454454), (455, 455455), (456, 456456), (457, 457457), (458, 458458), (459, 459459), (460, 460460), (461, 461461), (462, 462462), (463, 463463), (464, 464464), (465, 465465), (466, 466466), (467, 467467), (468, 468468), (469, 469469), (470, 470470), (471, 471471), (472, 472472), (473, 473473), (474, 474474), (475, 475475), (476, 476476), (477, 477477), (478, 478478), (479, 479479), (480, 480480), (481, 481481), (482, 482482), (483, 483483), (484, 484484), (485, 485485), (486, 486486), (487, 487487), (488, 488488), (489, 489489), (490, 490490), (491, 491491), (492, 492492), (493, 493493), (494, 494494), (495, 495495), (496, 496496), (497, 497497), (498, 498498), (499, 499499), (500, 500500), (501, 501501), (502, 502502), (503, 503503), (504, 504504), (505, 505505), (506, 506506), (507, 507507), (508, 508508), (509, 509509), (510, 510510), (511, 511511), (512, 512512), (513, 513513), (514, 514514), (515, 515515), (516, 516516), (517, 517517), (518, 518518), (519, 519519), (520, 520520), (521, 521521), (522, 522522), (523, 523523), (524, 524524), (525, 525525), (526, 526526), (527, 527527), (528, 528528), (529, 529529), (530, 530530), (531, 531531), (532, 532532), (533, 533533), (534, 534534), (535, 535535), (536, 536536), (537, 537537), (538, 538538), (539, 539539), (540, 540540), (541, 541541), (542, 542542), (543, 543543), (544, 544544), (545, 545545), (546, 546546), (547, 547547), (548, 548548), (549, 549549), (550, 550550), (551, 551551), (552, 552552), (553, 553553), (554, 554554), (555, 555555), (556, 556556), (557, 557557), (558, 558558), (559, 559559), (560, 560560), (561, 561561), (562, 562562), (563, 563563), (564, 564564), (565, 565565), (566, 566566), (567, 567567), (568, 568568), (569, 569569), (570, 570570), (571, 571571), (572, 572572), (573, 573573), (574, 574574), (575, 575575), (576, 576576), (577, 577577), (578, 578578), (579, 579579), (580, 580580), (581, 581581), (582, 582582), (583, 583583), (584, 584584), (585, 585585), (586, 586586), (587, 587587), (588, 588588), (589, 589589), (590, 590590), (591, 591591), (592, 592592), (593, 593593), (594, 594594), (595, 595595), (596, 596596), (597, 597597), (598, 598598), (599, 599599), (600, 600600), (601, 601601), (602, 602602), (603, 603603), (604, 604604), (605, 605605), (606, 606606), (607, 607607), (608, 608608), (609, 609609), (610, 610610), (611, 611611), (612, 612612), (613, 613613), (614, 614614), (615, 615615), (616, 616616), (617, 617617), (618, 618618), (619, 619619), (620, 620620), (621, 621621), (622, 622622), (623, 623623), (624, 624624), (625, 625625), (626, 626626), (627, 627627), (628, 628628), (629, 629629), (630, 630630), (631, 631631), (632, 632632), (633, 633633), (634, 634634), (635, 635635), (636, 636636), (637, 637637), (638, 638638), (639, 639639), (640, 640640), (641, 641641), (642, 642642), (643, 643643), (644, 644644), (645, 645645), (646, 646646), (647, 647647), (648, 648648), (649, 649649), (650, 650650), (651, 651651), (652, 652652), (653, 653653), (654, 654654), (655, 655655), (656, 656656), (657, 657657), (658, 658658), (659, 659659), 
(660, 660660), (661, 661661), (662, 662662), (663, 663663), (664, 664664), (665, 665665), (666, 666666), (667, 667667), (668, 668668), (669, 669669), (670, 670670), (671, 671671), (672, 672672), (673, 673673), (674, 674674), (675, 675675), (676, 676676), (677, 677677), (678, 678678), (679, 679679), (680, 680680), (681, 681681), (682, 682682), (683, 683683), (684, 684684), (685, 685685), (686, 686686), (687, 687687), (688, 688688), (689, 689689), (690, 690690), (691, 691691), (692, 692692), (693, 693693), (694, 694694), (695, 695695), (696, 696696), (697, 697697), (698, 698698), (699, 699699), (700, 700700), (701, 701701), (702, 702702), (703, 703703), (704, 704704), (705, 705705), (706, 706706), (707, 707707), (708, 708708), (709, 709709), (710, 710710), (711, 711711), (712, 712712), (713, 713713), (714, 714714), (715, 715715), (716, 716716), (717, 717717), (718, 718718), (719, 719719), (720, 720720), (721, 721721), (722, 722722), (723, 723723), (724, 724724), (725, 725725), (726, 726726), (727, 727727), (728, 728728), (729, 729729), (730, 730730), (731, 731731), (732, 732732), (733, 733733), (734, 734734), (735, 735735), (736, 736736), (737, 737737), (738, 738738), (739, 739739), (740, 740740), (741, 741741), (742, 742742), (743, 743743), (744, 744744), (745, 745745), (746, 746746), (747, 747747), (748, 748748), (749, 749749), (750, 750750), (751, 751751), (752, 752752), (753, 753753), (754, 754754), (755, 755755), (756, 756756), (757, 757757), (758, 758758), (759, 759759), (760, 760760), (761, 761761), (762, 762762), (763, 763763), (764, 764764), (765, 765765), (766, 766766), (767, 767767), (768, 768768), (769, 769769), (770, 770770), (771, 771771), (772, 772772), (773, 773773), (774, 774774), (775, 775775), (776, 776776), (777, 777777), (778, 778778), (779, 779779), (780, 780780), (781, 781781), (782, 782782), (783, 783783), (784, 784784), (785, 785785), (786, 786786), (787, 787787), (788, 788788), (789, 789789), (790, 790790), (791, 791791), (792, 792792), (793, 793793), (794, 794794), (795, 795795), (796, 796796), (797, 797797), (798, 798798), (799, 799799), (800, 800800), (801, 801801), (802, 802802), (803, 803803), (804, 804804), (805, 805805), (806, 806806), (807, 807807), (808, 808808), (809, 809809), (810, 810810), (811, 811811), (812, 812812), (813, 813813), (814, 814814), (815, 815815), (816, 816816), (817, 817817), (818, 818818), (819, 819819), (820, 820820), (821, 821821), (822, 822822), (823, 823823), (824, 824824), (825, 825825), (826, 826826), (827, 827827), (828, 828828), (829, 829829), (830, 830830), (831, 831831), (832, 832832), (833, 833833), (834, 834834), (835, 835835), (836, 836836), (837, 837837), (838, 838838), (839, 839839), (840, 840840), (841, 841841), (842, 842842), (843, 843843), (844, 844844), (845, 845845), (846, 846846), (847, 847847), (848, 848848), (849, 849849), (850, 850850), (851, 851851), (852, 852852), (853, 853853), (854, 854854), (855, 855855), (856, 856856), (857, 857857), (858, 858858), (859, 859859), (860, 860860), (861, 861861), (862, 862862), (863, 863863), (864, 864864), (865, 865865), (866, 866866), (867, 867867), (868, 868868), (869, 869869), (870, 870870), (871, 871871), (872, 872872), (873, 873873), (874, 874874), (875, 875875), (876, 876876), (877, 877877), (878, 878878), (879, 879879), (880, 880880), (881, 881881), (882, 882882), (883, 883883), (884, 884884), (885, 885885), (886, 886886), (887, 887887), (888, 888888), (889, 889889), (890, 890890), (891, 891891), (892, 892892), (893, 893893), (894, 894894), (895, 895895), (896, 896896), 
(897, 897897), (898, 898898), (899, 899899), (900, 900900), (901, 901901), (902, 902902), (903, 903903), (904, 904904), (905, 905905), (906, 906906), (907, 907907), (908, 908908), (909, 909909), (910, 910910), (911, 911911), (912, 912912), (913, 913913), (914, 914914), (915, 915915), (916, 916916), (917, 917917), (918, 918918), (919, 919919), (920, 920920), (921, 921921), (922, 922922), (923, 923923), (924, 924924), (925, 925925), (926, 926926), (927, 927927), (928, 928928), (929, 929929), (930, 930930), (931, 931931), (932, 932932), (933, 933933), (934, 934934), (935, 935935), (936, 936936), (937, 937937), (938, 938938), (939, 939939), (940, 940940), (941, 941941), (942, 942942), (943, 943943), (944, 944944), (945, 945945), (946, 946946), (947, 947947), (948, 948948), (949, 949949), (950, 950950), (951, 951951), (952, 952952), (953, 953953), (954, 954954), (955, 955955), (956, 956956), (957, 957957), (958, 958958), (959, 959959), (960, 960960), (961, 961961), (962, 962962), (963, 963963), (964, 964964), (965, 965965), (966, 966966), (967, 967967), (968, 968968), (969, 969969), (970, 970970), (971, 971971), (972, 972972), (973, 973973), (974, 974974), (975, 975975), (976, 976976), (977, 977977), (978, 978978), (979, 979979), (980, 980980), (981, 981981), (982, 982982), (983, 983983), (984, 984984), (985, 985985), (986, 986986), (987, 987987), (988, 988988), (989, 989989), (990, 990990), (991, 991991), (992, 992992), (993, 993993), (994, 994994), (995, 995995), (996, 996996), (997, 997997), (998, 998998), (999, 999999), (10000, 10000);", parameters: 0b 2025-04-17T17:11:06.636710Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2bq4nka5bfrczkfzwypw05, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDM2YzhlNDMtMjA1Njg0YTAtZjYwYTgxNTUtNjFjOTU1MjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR 2025-04-17T17:11:06.683618Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2bq4nka5bfrczkfzwypw05, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDM2YzhlNDMtMjA1Njg0YTAtZjYwYTgxNTUtNjFjOTU1MjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR --- split started --- --- split finished --- Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:82:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:84:2114] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:88:2057] recipient: [8:84:2114] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:87:2115] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:107:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:108:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:86:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:90:2057] recipient: [10:88:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:88:2118] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:145:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2118] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:92:2057] recipient: [11:88:2118] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2119] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:209 ... recipient: [15:78:2110] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:81:2111] Leader for TabletID 72057594037927937 is [15:81:2111] sender: [15:135:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:76:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:79:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:78:2110] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:82:2057] recipient: [16:78:2110] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! 
new actor is[16:81:2111] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:135:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:77:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:81:2057] recipient: [17:79:2110] Leader for TabletID 72057594037927937 is [17:82:2111] sender: [17:83:2057] recipient: [17:79:2110] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:82:2111] Leader for TabletID 72057594037927937 is [17:82:2111] sender: [17:136:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:80:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:83:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:84:2057] recipient: [18:82:2113] Leader for TabletID 72057594037927937 is [18:85:2114] sender: [18:86:2057] recipient: [18:82:2113] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:85:2114] Leader for TabletID 72057594037927937 is [18:85:2114] sender: [18:139:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:80:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:84:2057] recipient: [19:82:2113] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:86:2057] recipient: [19:82:2113] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! 
new actor is[19:85:2114] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:81:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:84:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:85:2057] recipient: [20:83:2113] Leader for TabletID 72057594037927937 is [20:86:2114] sender: [20:87:2057] recipient: [20:83:2113] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! new actor is[20:86:2114] Leader for TabletID 72057594037927937 is [20:86:2114] sender: [20:140:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:82:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:85:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:86:2057] recipient: [21:84:2114] Leader for TabletID 72057594037927937 is [21:87:2115] sender: [21:88:2057] recipient: [21:84:2114] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:87:2115] Leader for TabletID 72057594037927937 is [21:87:2115] sender: [21:107:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:83:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:86:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:87:2057] recipient: [22:85:2115] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:89:2057] recipient: [22:85:2115] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! 
new actor is[22:88:2116] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:108:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:86:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:89:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:90:2057] recipient: [23:88:2118] Leader for TabletID 72057594037927937 is [23:91:2119] sender: [23:92:2057] recipient: [23:88:2118] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:91:2119] Leader for TabletID 72057594037927937 is [23:91:2119] sender: [23:145:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:86:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:89:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:90:2057] recipient: [24:88:2118] Leader for TabletID 72057594037927937 is [24:91:2119] sender: [24:92:2057] recipient: [24:88:2118] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:91:2119] Leader for TabletID 72057594037927937 is [24:91:2119] sender: [24:145:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:87:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:89:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:91:2057] recipient: [25:90:2118] Leader for TabletID 72057594037927937 is [25:92:2119] sender: [25:93:2057] recipient: [25:90:2118] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! 
new actor is[25:92:2119] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061] >> GenericFederatedQuery::IcebergHiveSaSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectConstant >> GenericFederatedQuery::IcebergHiveBasicSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveBasicSelectConstant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingDifferentKey [GOOD] Test command err: 2025-04-17T17:10:05.521242Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:05.521649Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:05.542914Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:05.543214Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:05.572394Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:05.572854Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=7163682796296709044, session=0, seqNo=0) 2025-04-17T17:10:05.573018Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:05.584705Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=7163682796296709044, session=1) 2025-04-17T17:10:05.585359Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[1:132:2158], cookie=14250004999198355862 2025-04-17T17:10:05.585740Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[1:145:2169], cookie=13422387710242991027) 2025-04-17T17:10:05.585798Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[1:145:2169], cookie=13422387710242991027) 2025-04-17T17:10:06.003855Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:06.016734Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:06.374934Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:06.389547Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:06.751331Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:06.763173Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:07.115983Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:07.133224Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:07.513956Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:07.536123Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:07.903454Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:07.929614Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:08.343337Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:08.362434Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Complete 2025-04-17T17:10:08.751812Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:08.776964Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:09.188637Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:09.209037Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:09.668635Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:09.685335Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:10.072508Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:10.089061Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:10.503348Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:10.536439Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:10.948381Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:10.968954Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:11.351643Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:11.365131Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:11.832846Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:11.849696Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:12.260660Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:12.281153Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:12.708511Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:12.725667Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:13.146563Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:13.171764Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:13.580935Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:13.606395Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:14.057219Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:14.075627Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:14.465489Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:14.479083Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:14.869016Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:14.892566Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:15.296419Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:15.314585Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:15.713001Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:15.729135Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:16.156379Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:16.175412Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Complete 2025-04-17T17:10:16.604878Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:16.624927Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:17.032725Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:17.049246Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:17.478371Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:17.497254Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:17.860424Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:17.873101Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:18.323341Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:18.343997Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:18.733230Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:18.745299Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:19.096120Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:19.108468Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:19.449295Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:19.461065Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:19.815942Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:19.837181Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:20.230033Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:20.244843Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:20.680443Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:20.695064Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:21.089993Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:21.107570Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:21.548702Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:21.573331Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:22.000908Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:22.033217Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:22.498547Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:22.533085Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:22.940532Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:22.964686Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:23.404206Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:23.429162Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:23.860200Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:23.884495Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Complete 2025-04-17T17:10:24.304442Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:24.324853Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:24.772495Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:24.792799Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:25.224693Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:25.243721Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:25.664444Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:25.686012Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:26.076433Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:26.093185Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:26.468364Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:26.480125Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:26.850146Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:26.861912Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:27.203283Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:27.215616Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:27.580487Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:27.592415Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:27.932393Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:27.944202Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:28.286296Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:28.298025Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TT ... 
eck::Execute 2025-04-17T17:10:45.448558Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:45.886731Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:45.903389Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:46.328399Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:46.343782Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:46.729642Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:46.743273Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:47.102985Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:47.116341Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:47.480433Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:47.496735Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:47.935944Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:47.948237Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:48.304892Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:48.318482Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:48.687464Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:48.699554Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:49.053608Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:49.067129Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:49.440491Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:49.459726Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:49.880636Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:49.904605Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:50.328574Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:50.364783Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:50.764371Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:50.784737Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:51.224513Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:51.237542Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:51.627370Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:51.640799Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:52.096458Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:52.118573Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:52.512443Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:52.535323Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:52.918833Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Execute 2025-04-17T17:10:52.936446Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:53.309981Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:53.323011Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:53.689517Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:53.710409Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:54.137953Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:54.155867Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:54.536372Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:54.552923Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:54.936447Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:54.948522Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:55.326301Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:55.341156Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:55.736476Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:55.750029Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:56.203369Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:56.217135Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:56.633424Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:56.650391Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:57.040358Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:57.060834Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:57.472636Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:57.493066Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:57.888452Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:57.908886Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:58.320478Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:58.341066Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:58.759571Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:58.777576Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:59.166848Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:59.184426Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:59.589214Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:59.610441Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:00.012364Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:00.029105Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:00.615346Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Execute 2025-04-17T17:11:00.653407Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:01.079013Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:01.118041Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:01.536817Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:01.557951Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:01.976247Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:02.003787Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:02.428907Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:02.446495Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:02.873391Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:02.892897Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:03.323473Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:03.344863Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:03.783949Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:03.805041Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:04.216461Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:04.229949Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:04.614886Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:04.638105Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:05.010008Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-17T17:11:05.010095Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-17T17:11:05.023254Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-17T17:11:05.034758Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[2:572:2566], cookie=1381489530544966725) 2025-04-17T17:11:05.034875Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[2:572:2566], cookie=1381489530544966725) 2025-04-17T17:11:07.276036Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:11:07.276466Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:11:07.338060Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:11:07.338626Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:11:07.383054Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:11:07.388291Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=12345, session=0, seqNo=0) 2025-04-17T17:11:07.388427Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:11:07.423596Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=12345, session=1) 2025-04-17T17:11:07.427062Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=23456, 
session=1, seqNo=0) 2025-04-17T17:11:07.458212Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=23456, session=1) 2025-04-17T17:11:11.776923Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:11:11.777045Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:11:11.833148Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:11:11.833292Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:11:11.854901Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:11:11.858592Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=12345, session=0, seqNo=0) 2025-04-17T17:11:11.859303Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:11:11.885920Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=12345, session=1) 2025-04-17T17:11:11.886598Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:136:2161], cookie=23456, session=1, seqNo=0) 2025-04-17T17:11:11.902132Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:136:2161], cookie=23456, session=1) >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectConstant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteDeleteThenReadRemaining [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] 2025-04-17T17:10:28.129602Z node 1 :KEYVALUE ERROR: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return false, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] 2025-04-17T17:10:53.815300Z node 2 :KEYVALUE ERROR: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return false, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is 
[0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:449:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:452:2057] recipient: [4:451:2377] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:453:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:454:2378] sender: [4:455:2057] recipient: [4:451:2377] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:454:2378] Leader for TabletID 72057594037927937 is [4:454:2378] sender: [4:508:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:449:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:452:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:453:2057] recipient: [5:451:2377] Leader for TabletID 72057594037927937 is [5:454:2378] sender: [5:455:2057] recipient: [5:451:2377] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:454:2378] Leader for TabletID 72057594037927937 is [5:454:2378] sender: [5:508:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:450:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:453:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:454:2057] recipient: [6:452:2377] Leader for TabletID 72057594037927937 is [6:455:2378] sender: [6:456:2057] recipient: [6:452:2377] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:455:2378] Leader for TabletID 72057594037927937 is [6:455:2378] sender: [6:509:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] Test command err: 2025-04-17T17:10:06.365483Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:06.365820Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:06.395517Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:06.396128Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:06.431346Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:06.431784Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=6995418620021095223, session=0, seqNo=0) 2025-04-17T17:10:06.431947Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:10:06.443565Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=6995418620021095223, session=1) 2025-04-17T17:10:06.444273Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=111, session=1, semaphore="Lock1" count=1) 2025-04-17T17:10:06.444432Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:10:06.444550Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:10:06.456432Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=111) 2025-04-17T17:10:06.456714Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:10:06.469286Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=222) 2025-04-17T17:10:06.469789Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:148:2172], cookie=15840873678174564318, name="Lock1") 2025-04-17T17:10:06.469862Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:148:2172], cookie=15840873678174564318) 2025-04-17T17:10:07.569216Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:10:07.569666Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:10:07.612636Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:10:07.612863Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:10:07.656567Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:10:07.658930Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=14152552581326982603, session=0, seqNo=0) 2025-04-17T17:10:07.659154Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new 
session 1 2025-04-17T17:10:07.690350Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=14152552581326982603, session=1) 2025-04-17T17:10:07.691771Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2159], cookie=10747277892041693913, session=0, seqNo=0) 2025-04-17T17:10:07.692484Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:10:07.708193Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2159], cookie=10747277892041693913, session=2) 2025-04-17T17:10:07.713238Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:10:07.714070Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:10:07.714420Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:10:07.741175Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=111) 2025-04-17T17:10:07.742100Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=112, session=1, semaphore="Lock2" count=1) 2025-04-17T17:10:07.742994Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-04-17T17:10:07.743538Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-04-17T17:10:07.768087Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=112) 2025-04-17T17:10:07.769728Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-17T17:10:07.770380Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-04-17T17:10:07.794172Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=222) 2025-04-17T17:10:07.794519Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=223) 2025-04-17T17:10:07.795780Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=333, session=2, semaphore="Lock1" count=1) 2025-04-17T17:10:07.802278Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=334, session=2, semaphore="Lock2" count=18446744073709551615) 2025-04-17T17:10:07.819249Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=333) 2025-04-17T17:10:07.819523Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=334) 2025-04-17T17:10:08.308312Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:08.325335Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:08.702531Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:08.725371Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:09.109590Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Execute 2025-04-17T17:10:09.128802Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:09.500346Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:09.512685Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:09.903668Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:09.920717Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:10.285666Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:10.309430Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:10.695379Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:10.747901Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:11.113960Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:11.127267Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:11.491380Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:11.509171Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:11.969013Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:11.988743Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:12.412405Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:12.432994Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:12.830091Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:12.854791Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:13.266815Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:13.288825Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:13.716446Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:13.736703Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:14.176449Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:14.195279Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:14.571850Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:14.591091Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:15.000504Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:15.014654Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:15.408569Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:15.425273Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:15.812008Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:15.838144Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:16.318309Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:16.345097Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:16.760052Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Execute 2025-04-17T17:10:16.790399Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:17.211190Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:17.226349Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:17.658669Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:17.674877Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:18.065479Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:18.080856Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:18.457316Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:18.472855Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:18.833712Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:18.845861Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:19.207865Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:19.220018Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:19.570193Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:19.582162Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:19.953223Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:19.970331Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:20.372435Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=2) 2025-04-17T17:10:20.372547Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 2 "Lock2" waiter link 2025-04-17T17:10:20.385619Z node 2 :KESUS_TABLET DEBUG: ... 
xecute 2025-04-17T17:10:56.432864Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:56.900544Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:56.920766Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:57.323745Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:57.349986Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:57.779810Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:57.799845Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:58.204437Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:58.221619Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:58.664840Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:58.681589Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:59.149157Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:59.166241Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:10:59.583250Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:10:59.606361Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:00.052385Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:00.073089Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:00.488809Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:00.513812Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:00.911288Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:00.940863Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:01.426095Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:01.439144Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:01.896590Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:01.912895Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:02.344508Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:02.369143Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:02.796795Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:02.815778Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:03.256770Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:03.285415Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:03.748434Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:03.768061Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:04.200498Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:04.213918Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:04.596062Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 
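The TTxSelfCheck::Execute / TTxSelfCheck::Complete pairs that dominate this stretch of the log are the Kesus tablet's periodic self-check running as ordinary tablet-local transactions: Execute runs against the tablet-local database and may be restarted, while Complete runs exactly once after the commit is durable. A minimal sketch of that two-phase contract, assuming the NTabletFlatExecutor::ITransaction interface from ydb/core/tablet_flat (the real TTxSelfCheck lives in ydb/core/kesus/tablet; this is an illustration, not its code):

    #include <ydb/core/tablet_flat/tablet_flat_executed.h>

    // Illustrative only: the Execute/Complete shape of a tablet-local
    // transaction such as the TTxSelfCheck entries in the log above.
    struct TTxSelfCheckSketch : public NKikimr::NTabletFlatExecutor::ITransaction {
        bool Execute(NKikimr::NTabletFlatExecutor::TTransactionContext& txc,
                     const NActors::TActorContext& ctx) override {
            Y_UNUSED(txc);
            Y_UNUSED(ctx);
            // Phase 1 ("TTxSelfCheck::Execute"): read/write the local DB via
            // txc; must avoid external side effects because this phase can
            // be re-run if the transaction has to restart.
            return true; // true = ready to commit
        }

        void Complete(const NActors::TActorContext& ctx) override {
            Y_UNUSED(ctx);
            // Phase 2 ("TTxSelfCheck::Complete"): runs once the commit is
            // durable; replies and scheduling the next check belong here.
        }
    };

The roughly 400 ms cadence between consecutive pairs in the log is the self-check timer firing, not a sign of retries.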
2025-04-17T17:11:04.610815Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:04.990771Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:05.006385Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:05.384505Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:05.398736Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:05.786888Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:05.801237Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:06.212741Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:06.237390Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:06.633196Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:06.653310Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:07.072479Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:07.106475Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:07.588846Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:11:07.621081Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:11:08.184772Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=1) 2025-04-17T17:11:08.185099Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-04-17T17:11:08.213217Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Complete (session=2, semaphore=1) 2025-04-17T17:11:08.252189Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:534:2530], cookie=6631913404066124606) 2025-04-17T17:11:08.252502Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:534:2530], cookie=6631913404066124606) 2025-04-17T17:11:08.272367Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:537:2533], cookie=1162395462008530138) 2025-04-17T17:11:08.272931Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:537:2533], cookie=1162395462008530138) 2025-04-17T17:11:08.275990Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:540:2536], cookie=26450516426413653, name="Lock1") 2025-04-17T17:11:08.297094Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:540:2536], cookie=26450516426413653) 2025-04-17T17:11:08.316484Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:543:2539], cookie=2298141422191872222, name="Lock1") 2025-04-17T17:11:08.316624Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:543:2539], cookie=2298141422191872222) 2025-04-17T17:11:12.357671Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:11:12.357778Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:11:12.426965Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:11:12.427295Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:11:12.463395Z 
node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:11:12.465318Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=14330981420715301257, session=0, seqNo=0) 2025-04-17T17:11:12.471090Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:11:12.492442Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=14330981420715301257, session=1) 2025-04-17T17:11:12.493836Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=6378052115191922481, session=0, seqNo=0) 2025-04-17T17:11:12.500338Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:11:12.517528Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=6378052115191922481, session=2) 2025-04-17T17:11:12.518752Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=6060278372053430045, session=0, seqNo=0) 2025-04-17T17:11:12.519255Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 3 2025-04-17T17:11:12.538781Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=6060278372053430045, session=3) 2025-04-17T17:11:12.541370Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:145:2169], cookie=16139977524150715601, name="Sem1", limit=3) 2025-04-17T17:11:12.542218Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-17T17:11:12.563956Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:145:2169], cookie=16139977524150715601) 2025-04-17T17:11:12.565171Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=111, session=1, semaphore="Sem1" count=2) 2025-04-17T17:11:12.565976Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-17T17:11:12.571599Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=222, session=2, semaphore="Sem1" count=2) 2025-04-17T17:11:12.572162Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=333, session=3, semaphore="Sem1" count=1) 2025-04-17T17:11:12.598082Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=111) 2025-04-17T17:11:12.598402Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=222) 2025-04-17T17:11:12.598436Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=333) 2025-04-17T17:11:12.602113Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:154:2178], cookie=3163890213728631595, name="Sem1") 2025-04-17T17:11:12.602425Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:154:2178], cookie=3163890213728631595) 2025-04-17T17:11:12.608144Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:157:2181], cookie=6021763465518347195, name="Sem1") 2025-04-17T17:11:12.608906Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:157:2181], cookie=6021763465518347195) 
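Two Kesus conventions show up in the node-5 sequence just above and in the earlier Lock1/Lock2 entries. First, a semaphore created with limit=3 admits acquires in FIFO order: session 1's count=2 acquire gets order #1, and the acquires from sessions 2 and 3 queue until a release frees capacity. Second, count=18446744073709551615 is Max<ui64>(), i.e. the whole capacity of an ephemeral semaphore, which is the usual way to express an exclusive lock (count=1 being a shared one). A client-side sketch of both, assuming the YDB C++ SDK coordination API (method and accessor names as I recall them; the node path is hypothetical and error handling is elided):

    #include <ydb-cpp-sdk/client/coordination/coordination.h>
    #include <util/generic/utility.h>

    using namespace NYdb;
    using namespace NYdb::NCoordination;

    // Sketch under the assumptions stated above.
    void CoordinationSketch(TDriver& driver) {
        TClient client(driver);
        TSession session =
            client.StartSession("/Root/my-kesus").GetValueSync().GetResult();

        // "Sem1" with limit=3: a count=2 acquire succeeds immediately
        // (order #1 in the log); later acquires wait in FIFO order.
        session.CreateSemaphore("Sem1", /* limit */ 3).GetValueSync();
        session.AcquireSemaphore(
            "Sem1", TAcquireSemaphoreSettings().Count(2)).GetValueSync();

        // Exclusive lock over an ephemeral semaphore: grab the full
        // capacity, as the Lock1/Lock2 acquires with
        // count=18446744073709551615 do in the log.
        session.AcquireSemaphore(
            "Lock1",
            TAcquireSemaphoreSettings().Ephemeral(true).Count(Max<ui64>()))
            .GetValueSync();

        // Releasing wakes the next queued waiters -- the "Processing
        // semaphore ... next order #N session M" lines.
        session.ReleaseSemaphore("Sem1").GetValueSync();
    }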
2025-04-17T17:11:12.609958Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:132:2158], cookie=444, name="Sem1") 2025-04-17T17:11:12.616331Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-04-17T17:11:12.616839Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-04-17T17:11:12.617116Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-04-17T17:11:12.636394Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:132:2158], cookie=444) 2025-04-17T17:11:12.639342Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:162:2186], cookie=7111199304050915897, name="Sem1") 2025-04-17T17:11:12.648919Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:162:2186], cookie=7111199304050915897) 2025-04-17T17:11:12.660921Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:165:2189], cookie=14050166486263620138, name="Sem1") 2025-04-17T17:11:12.661239Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:165:2189], cookie=14050166486263620138) >> TInterconnectTest::TestManyEventsWithReconnect [GOOD] >> TInterconnectTest::TestEventWithPayloadSerialization |74.5%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} >> GenericFederatedQuery::ClickHouseManagedSelectAll [GOOD] >> GenericFederatedQuery::ClickHouseManagedSelectConstant >> TTransferTests::CreateInParallel [GOOD] >> TTransferTests::CreateDropRecreate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! 
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
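The repeating !Reboot blocks here are fault injection by the test harness: it picks one event type (TEvTabletPipe::TEvServerConnected, TEvKeyValue::TEvRequest, TEvKeyValue::TEvIntermediate, ...), restarts tablet 72057594037927937 exactly when that event is observed, waits until the tablet resolver points at the new leader ("new actor is[...]"), and then re-checks the scenario against the new generation. A sketch of the restart step, assuming the RebootTablet helper from ydb/core/testlib (signature from memory; runtime and tablet bootstrap elided):

    #include <ydb/core/testlib/tablet_helpers.h>

    // Sketch: the "!Reboot ... rebooted!" / "tablet resolver refreshed!"
    // lines in the log bracket a step like this one.
    void InjectReboot(NKikimr::TTestActorRuntime& runtime,
                      ui64 tabletId, const NActors::TActorId& edge) {
        // Kills the current leader and waits for the next generation to
        // come up, so subsequent requests reach the new leader actor.
        NKikimr::RebootTablet(runtime, tabletId, edge);
    }

Iterating the crash point over every event type in the request pipeline is what makes these tests long, but it gives each state transition its own restart coverage.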
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... bletID 72057594037927937 is [13:56:2097] sender: [13:87:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:90:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:91:2057] recipient: [13:89:2117] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:93:2057] recipient: [13:89:2117] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:92:2118] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:146:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:76:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:79:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:78:2110] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:82:2057] recipient: [16:78:2110] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! 
new actor is[16:81:2111] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:135:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:76:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:78:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:79:2110] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:82:2057] recipient: [17:79:2110] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:81:2111] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:135:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:77:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:80:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:79:2110] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:83:2057] recipient: [18:79:2110] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:82:2111] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:136:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:80:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:82:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:84:2057] recipient: [19:83:2113] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:86:2057] recipient: [19:83:2113] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! 
new actor is[19:85:2114] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:80:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:84:2057] recipient: [20:82:2113] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:86:2057] recipient: [20:82:2113] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! new actor is[20:85:2114] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:139:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:81:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:85:2057] recipient: [21:83:2113] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:87:2057] recipient: [21:83:2113] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:86:2114] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:104:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:83:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:86:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:87:2057] recipient: [22:85:2115] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:89:2057] recipient: [22:85:2115] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! 
new actor is[22:88:2116] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:142:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:83:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:86:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:87:2057] recipient: [23:85:2115] Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:89:2057] recipient: [23:85:2115] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:88:2116] Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:142:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:87:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:88:2057] recipient: [24:86:2115] Leader for TabletID 72057594037927937 is [24:89:2116] sender: [24:90:2057] recipient: [24:86:2115] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! 
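Both TKeyValueTest scenarios in this block drive the same request shape against the KeyValue tablet: a batch of writes followed by a range read constrained by a limit, which is exactly where a reboot mid-pipeline is interesting. A sketch of such a request, with field names from NKikimrClient::TKeyValueRequest as far as I recall (the key/value literals and the byte limit are made up for illustration):

    #include <ydb/core/keyvalue/keyvalue_events.h>

    // Sketch of a write + limited range-read request, matching the pattern
    // the TestWriteReadRange*LimitThenLimitWorks* tests exercise.
    THolder<NKikimr::TEvKeyValue::TEvRequest> MakeWriteReadRangeRequest() {
        auto ev = MakeHolder<NKikimr::TEvKeyValue::TEvRequest>();

        auto* write = ev->Record.AddCmdWrite();
        write->SetKey("key_0");
        write->SetValue("value_0");

        auto* read = ev->Record.AddCmdReadRange();
        read->MutableRange()->SetFrom("key_");
        read->MutableRange()->SetIncludeFrom(true);
        read->SetIncludeData(true);   // return values, not just metadata
        read->SetLimitBytes(100);     // the "limit" the test names refer to
        return ev;
    }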
new actor is[24:89:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:85:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:86:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:86:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:76:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:79:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:78:2110] Leader for TabletID 72057594037927937 is [13:81:2111] sender: [13:82:2057] recipient: [13:78:2110] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! 
new actor is[13:81:2111] Leader for TabletID 72057594037927937 is [13:81:2111] sender: [13:135:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:76:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:79:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:78:2110] Leader for TabletID 72057594037927937 is [14:81:2111] sender: [14:82:2057] recipient: [14:78:2110] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! new actor is[14:81:2111] Leader for TabletID 72057594037927937 is [14:81:2111] sender: [14:135:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:77:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:80:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:79:2110] Leader for TabletID 72057594037927937 is [15:82:2111] sender: [15:83:2057] recipient: [15:79:2110] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:82:2111] Leader for TabletID 72057594037927937 is [15:82:2111] sender: [15:136:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:83:2057] recipient: [16:82:2113] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:84:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:85:2114] sender: [16:86:2057] recipient: [16:82:2113] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! 
new actor is[16:85:2114] Leader for TabletID 72057594037927937 is [16:85:2114] sender: [16:139:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:83:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:84:2057] recipient: [17:82:2113] Leader for TabletID 72057594037927937 is [17:85:2114] sender: [17:86:2057] recipient: [17:82:2113] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:85:2114] Leader for TabletID 72057594037927937 is [17:85:2114] sender: [17:139:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:84:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:85:2057] recipient: [18:83:2113] Leader for TabletID 72057594037927937 is [18:86:2114] sender: [18:87:2057] recipient: [18:83:2113] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:86:2114] Leader for TabletID 72057594037927937 is [18:86:2114] sender: [18:104:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:86:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:87:2057] recipient: [19:85:2115] Leader for TabletID 72057594037927937 is [19:88:2116] sender: [19:89:2057] recipient: [19:85:2115] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! 
new actor is[19:88:2116] Leader for TabletID 72057594037927937 is [19:88:2116] sender: [19:142:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:86:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:87:2057] recipient: [20:85:2115] Leader for TabletID 72057594037927937 is [20:88:2116] sender: [20:89:2057] recipient: [20:85:2115] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! new actor is[20:88:2116] Leader for TabletID 72057594037927937 is [20:88:2116] sender: [20:142:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:87:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:88:2057] recipient: [21:86:2115] Leader for TabletID 72057594037927937 is [21:89:2116] sender: [21:90:2057] recipient: [21:86:2115] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:89:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] >> TTransferTests::CreateWrongBatchSize [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsSmall |74.5%| [TA] $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SystemView::CollectPreparedQueries >> TKeyValueTest::TestRenameToLongKey [GOOD] >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectConstant >> SystemView::Nodes >> DbCounters::TabletsSimple >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] >> SystemView::VSlotsFields >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> SystemView::AuthGroups_Access ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] Test command err: Starting iteration 0 Starting iteration 1 Starting iteration 2 Starting iteration 3 Starting iteration 4 Starting iteration 5 Starting iteration 6 Starting iteration 7 Starting iteration 8 Starting iteration 9 Starting iteration 10 Starting iteration 11 Starting iteration 12 Starting iteration 13 Starting iteration 14 Starting iteration 15 Starting iteration 16 Starting iteration 17 Starting iteration 18 Starting iteration 19 Starting iteration 20 Starting iteration 21 Starting iteration 22 Starting iteration 23 Starting iteration 24 Starting iteration 25 Starting iteration 26 Starting iteration 27 Starting iteration 28 Starting iteration 29 Starting iteration 30 Starting iteration 31 Starting iteration 32 Starting iteration 33 Starting iteration 34 Starting iteration 35 Starting iteration 36 Starting iteration 37 Starting iteration 38 Starting iteration 39 Starting iteration 40 Starting iteration 41 Starting iteration 42 Starting iteration 43 Starting iteration 44 Starting iteration 45 Starting iteration 46 Starting iteration 47 Starting iteration 48 Starting iteration 49 0 0 0 1 0 3 0 7 0 15 0 31 0 63 0 127 0 255 0 511 0 1023 0 2047 0 4095 0 8191 0 16383 0 32767 0 65535 1 0 1 1 1 3 1 7 1 15 1 31 1 63 1 127 1 255 1 511 1 1023 1 2047 1 4095 1 8191 1 16383 1 32767 1 65535 3 0 3 1 3 3 3 7 3 15 3 31 3 63 3 127 3 255 3 511 3 1023 3 2047 3 4095 3 8191 3 16383 3 32767 3 65535 7 0 7 1 7 3 7 7 7 15 7 31 7 63 7 127 7 255 7 511 7 1023 7 2047 7 4095 7 8191 7 16383 7 32767 7 65535 15 0 15 1 15 3 15 7 15 15 15 31 15 63 15 127 15 255 15 511 15 1023 15 2047 15 4095 15 8191 15 16383 15 32767 15 65535 31 0 31 1 31 3 31 7 31 15 31 31 31 63 31 127 31 255 31 511 31 1023 31 2047 31 4095 31 8191 31 16383 31 32767 31 65535 63 0 63 1 63 3 63 7 63 15 63 31 63 63 63 127 63 255 63 511 63 1023 63 2047 63 4095 63 8191 63 16383 63 32767 63 65535 127 0 127 1 127 3 127 7 127 15 127 31 127 63 127 127 127 255 127 511 127 1023 127 2047 127 4095 127 8191 127 16383 127 32767 127 65535 255 0 255 1 255 3 255 7 255 15 255 31 255 63 255 127 255 255 255 511 255 1023 255 2047 255 4095 255 8191 255 16383 255 32767 255 65535 511 0 511 1 511 3 511 7 511 15 511 31 511 63 511 127 511 255 511 511 511 1023 511 2047 511 4095 511 8191 511 16383 511 32767 511 65535 1023 0 1023 1 1023 3 1023 7 1023 15 1023 31 1023 63 1023 127 1023 255 1023 511 1023 1023 1023 2047 1023 4095 1023 8191 1023 16383 1023 32767 1023 65535 2047 0 2047 1 2047 3 2047 7 2047 15 2047 31 2047 63 2047 127 2047 255 2047 511 2047 1023 2047 2047 2047 4095 2047 8191 2047 16383 2047 32767 2047 65535 4095 0 4095 1 4095 3 4095 7 4095 15 4095 31 4095 63 4095 127 4095 255 4095 511 4095 1023 4095 2047 4095 4095 4095 8191 4095 16383 4095 32767 4095 65535 8191 0 8191 1 8191 3 8191 7 8191 15 8191 31 8191 63 8191 127 8191 255 8191 511 8191 1023 8191 2047 8191 4095 8191 8191 8191 16383 8191 32767 8191 
65535 16383 0 16383 1 16383 3 16383 7 16383 15 16383 31 16383 63 16383 127 16383 255 16383 511 16383 1023 16383 2047 16383 4095 16383 8191 16383 16383 16383 32767 16383 65535 32767 0 32767 1 32767 3 32767 7 32767 15 32767 31 32767 63 32767 127 32767 255 32767 511 32767 1023 32767 2047 32767 4095 32767 8191 32767 16383 32767 32767 32767 65535 65535 0 65535 1 65535 3 65535 7 65535 15 65535 31 65535 63 65535 127 65535 255 65535 511 65535 1023 65535 2047 65535 4095 65535 8191 65535 16383 65535 32767 65535 65535 >> GenericFederatedQuery::IcebergHadoopTokenSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectConstant >> SystemView::TopPartitionsByCpuFields >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken |74.5%| [TA] $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! 
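The TInterconnectTest::TestEventWithPayloadSerialization output above is a size grid: every pair of payload lengths of the form 2^k - 1 for k = 0..16 (0, 1, 3, ..., 65535), which exercises buffer boundaries around powers of two. Below is a sketch of how such a grid can be generated and round-trip-checked; the length-prefixed Serialize format is a placeholder assumption, not the real interconnect serializer.

    // Generate all pairs of payload sizes 2^k - 1 and check a simple
    // length-prefixed encoding round-trips to the expected byte count.
    #include <cassert>
    #include <cstdint>
    #include <string>
    #include <vector>

    static std::string Serialize(const std::vector<std::string>& payloads) {
        std::string out;
        for (const auto& p : payloads) {
            uint32_t n = static_cast<uint32_t>(p.size());
            out.append(reinterpret_cast<const char*>(&n), sizeof(n)); // length prefix
            out += p;
        }
        return out;
    }

    int main() {
        std::vector<uint32_t> sizes;
        for (int k = 0; k <= 16; ++k)
            sizes.push_back((1u << k) - 1);          // 0, 1, 3, 7, ..., 65535

        for (uint32_t a : sizes) {
            for (uint32_t b : sizes) {
                std::vector<std::string> payloads{std::string(a, 'x'),
                                                  std::string(b, 'y')};
                std::string blob = Serialize(payloads);
                // Each payload costs 4 length bytes plus its body.
                assert(blob.size() == 8 + a + b);
            }
        }
    }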
!Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
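Every actor in these traces is named by a bracketed triple such as [14:56:2097], and a reboot shows up as the second and third fields jumping forward while the first stays fixed. A small parsing sketch follows; reading the fields as (test instance, local actor id, cookie) is an assumption made for illustration only.

    // Parse the bracketed actor references that appear throughout the trace.
    #include <cstdint>
    #include <iostream>
    #include <optional>
    #include <sstream>
    #include <string>

    struct ActorRef {
        uint32_t instance = 0;  // first field: 13, 14, ... across the reboot series
        uint64_t localId = 0;   // second field: 56 before reboot, 81+ after
        uint64_t cookie = 0;    // third field: 2097, 2111, ...
    };

    std::optional<ActorRef> ParseActorRef(const std::string& s) {
        ActorRef r;
        char lb, c1, c2, rb;
        std::istringstream in(s);
        if (in >> lb >> r.instance >> c1 >> r.localId >> c2 >> r.cookie >> rb &&
            lb == '[' && c1 == ':' && c2 == ':' && rb == ']')
            return r;
        return std::nullopt;   // malformed reference
    }

    int main() {
        auto before = ParseActorRef("[14:56:2097]");
        auto after  = ParseActorRef("[14:81:2111]");
        // After a reboot the instance stays, but localId/cookie move forward.
        std::cout << (before && after && after->localId > before->localId) << "\n";
    }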
new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:87:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:93:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:92:2119] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:146:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 7927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:81:2111] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:135:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:76:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:78:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:79:2110] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:82:2057] recipient: [17:79:2110] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:81:2111] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:135:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:77:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:80:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:79:2110] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:83:2057] recipient: [18:79:2110] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:82:2111] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:136:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:80:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:82:2113] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:84:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:86:2057] recipient: [19:82:2113] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! new actor is[19:85:2114] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! 
Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:80:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:82:2113] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:84:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:86:2057] recipient: [20:82:2113] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! new actor is[20:85:2114] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:139:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:81:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:85:2057] recipient: [21:83:2113] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:87:2057] recipient: [21:83:2113] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:86:2114] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:104:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:83:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:86:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:87:2057] recipient: [22:85:2115] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:89:2057] recipient: [22:85:2115] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! new actor is[22:88:2116] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:142:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:83:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:86:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:87:2057] recipient: [23:85:2115] Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:89:2057] recipient: [23:85:2115] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:88:2116] Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:142:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:87:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:88:2057] recipient: [24:86:2115] Leader for TabletID 72057594037927937 is [24:89:2116] sender: [24:90:2057] recipient: [24:86:2115] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:89:2116] Leader for TabletID 72057594037927937 is [24:89:2116] sender: [24:143:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:87:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:90:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:91:2057] recipient: [25:89:2118] Leader for TabletID 72057594037927937 is [25:92:2119] sender: [25:93:2057] recipient: [25:89:2118] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! new actor is[25:92:2119] Leader for TabletID 72057594037927937 is [25:92:2119] sender: [25:146:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! 
Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:87:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:90:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:91:2057] recipient: [26:89:2118] Leader for TabletID 72057594037927937 is [26:92:2119] sender: [26:93:2057] recipient: [26:89:2118] !Reboot 72057594037927937 (actor [26:56:2097]) rebooted! !Reboot 72057594037927937 (actor [26:56:2097]) tablet resolver refreshed! new actor is[26:92:2119] Leader for TabletID 72057594037927937 is [26:92:2119] sender: [26:146:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:57:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:74:2057] recipient: [27:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! 
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! 
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:82:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:86:2057] recipient: [7:84:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:84:2115] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:141:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:82:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:84:2115] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:88:2057] recipient: [8:84:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:87:2116] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:141:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
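Between "rebooted!" and "new actor is[...]" each pass logs "tablet resolver refreshed!": the harness does not proceed until name resolution stops returning the dead leader. A generic sketch of that wait loop, with the resolver callback, timeout, and poll interval all assumptions rather than the real tablet resolver:

    // Poll resolution until it reports someone other than the old leader.
    #include <chrono>
    #include <functional>
    #include <iostream>
    #include <thread>

    using ActorId = int;

    bool WaitForNewLeader(const std::function<ActorId()>& resolve,
                          ActorId oldLeader,
                          std::chrono::milliseconds timeout) {
        auto deadline = std::chrono::steady_clock::now() + timeout;
        while (std::chrono::steady_clock::now() < deadline) {
            if (resolve() != oldLeader)        // cache refreshed, new leader known
                return true;
            std::this_thread::sleep_for(std::chrono::milliseconds(5));
        }
        return false;                          // resolver never caught up
    }

    int main() {
        ActorId leader = 56;                   // pre-reboot leader, as in [x:56:2097]
        int polls = 0;
        // Simulate the resolver lagging a few polls behind the reboot.
        auto resolve = [&]() { return ++polls < 3 ? 56 : 81; };
        if (WaitForNewLeader(resolve, leader, std::chrono::seconds(1)))
            std::cout << "tablet resolver refreshed!\n";
    }

Waiting on the resolver rather than sleeping a fixed interval is what keeps these reboot tests deterministic under the address-sanitizer slowdown this run is built with.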
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:86:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:86:2117] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2117] Leader for TabletID 72057594037927937 is [10:89:2118] sender: [10:90:2057] recipient: [10:86:2117] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2118] Leader for TabletID 72057594037927937 is [10:89:2118] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2119] Leader for TabletID 72057594037927937 is [11:91:2120] sender: [11:92:2057] recipient: [11:88:2119] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2120] Leader for TabletID 72057594037927937 is [11:91:2120] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Re ... 
is [18:56:2097] sender: [18:95:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:96:2057] recipient: [18:94:2123] Leader for TabletID 72057594037927937 is [18:97:2124] sender: [18:98:2057] recipient: [18:94:2123] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:97:2124] Leader for TabletID 72057594037927937 is [18:97:2124] sender: [18:151:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:76:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:78:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:80:2057] recipient: [21:79:2110] Leader for TabletID 72057594037927937 is [21:81:2111] sender: [21:82:2057] recipient: [21:79:2110] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:81:2111] Leader for TabletID 72057594037927937 is [21:81:2111] sender: [21:135:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:76:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:79:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:80:2057] recipient: [22:78:2110] Leader for TabletID 72057594037927937 is [22:81:2111] sender: [22:82:2057] recipient: [22:78:2110] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! 
new actor is[22:81:2111] Leader for TabletID 72057594037927937 is [22:81:2111] sender: [22:135:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:77:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:80:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:81:2057] recipient: [23:79:2110] Leader for TabletID 72057594037927937 is [23:82:2111] sender: [23:83:2057] recipient: [23:79:2110] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:82:2111] Leader for TabletID 72057594037927937 is [23:82:2111] sender: [23:136:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:80:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:83:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:82:2113] Leader for TabletID 72057594037927937 is [24:85:2114] sender: [24:86:2057] recipient: [24:82:2113] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:85:2114] Leader for TabletID 72057594037927937 is [24:85:2114] sender: [24:139:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:80:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:83:2057] recipient: [25:82:2113] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:84:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:85:2114] sender: [25:86:2057] recipient: [25:82:2113] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! 
new actor is[25:85:2114] Leader for TabletID 72057594037927937 is [25:85:2114] sender: [25:139:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:81:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:84:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:85:2057] recipient: [26:83:2113] Leader for TabletID 72057594037927937 is [26:86:2114] sender: [26:87:2057] recipient: [26:83:2113] !Reboot 72057594037927937 (actor [26:56:2097]) rebooted! !Reboot 72057594037927937 (actor [26:56:2097]) tablet resolver refreshed! new actor is[26:86:2114] Leader for TabletID 72057594037927937 is [26:86:2114] sender: [26:140:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:57:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:74:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:84:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:87:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:88:2057] recipient: [27:86:2116] Leader for TabletID 72057594037927937 is [27:89:2117] sender: [27:90:2057] recipient: [27:86:2116] !Reboot 72057594037927937 (actor [27:56:2097]) rebooted! !Reboot 72057594037927937 (actor [27:56:2097]) tablet resolver refreshed! new actor is[27:89:2117] Leader for TabletID 72057594037927937 is [27:89:2117] sender: [27:143:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:57:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:74:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:84:2057] recipient: [28:36:2083] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:87:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:88:2057] recipient: [28:86:2116] Leader for TabletID 72057594037927937 is [28:89:2117] sender: [28:90:2057] recipient: [28:86:2116] !Reboot 72057594037927937 (actor [28:56:2097]) rebooted! !Reboot 72057594037927937 (actor [28:56:2097]) tablet resolver refreshed! 
new actor is[28:89:2117] Leader for TabletID 72057594037927937 is [28:89:2117] sender: [28:143:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:57:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:74:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:85:2057] recipient: [29:36:2083] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:88:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:89:2057] recipient: [29:87:2116] Leader for TabletID 72057594037927937 is [29:90:2117] sender: [29:91:2057] recipient: [29:87:2116] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! new actor is[29:90:2117] Leader for TabletID 72057594037927937 is [29:90:2117] sender: [29:144:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] >> TCertificateCheckerTest::CheckSubjectDns >> TKeyValueTest::TestInlineCopyRangeWorks [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorksNewApi >> TKeyValueTest::TestCopyRangeWorksNewApi [GOOD] >> TKeyValueTest::TestCopyRangeToLongKey >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows [GOOD] |74.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |74.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] Test command err: 2025-04-17T17:10:20.933767Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:10:20.978135Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:10:20.980685Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T17:10:20.983344Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:10:21.214258Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:10:21.246745Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:10:21.687979Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:10:21.709849Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T17:10:21.710345Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 
2025-04-17T17:10:21.710840Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T17:10:21.712714Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:10:21.713158Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:10:21.713219Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T17:10:21.713524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:10:21.713568Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:22.041964Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:10:22.276585Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T17:10:22.277345Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:10:22.277781Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T17:10:22.278162Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T17:10:22.278534Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T17:10:22.278740Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:10:22.280085Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:22.288478Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:22.290115Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T17:10:22.290584Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T17:10:22.291389Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:10:22.291593Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:10:22.292391Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T17:10:22.292776Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T17:10:22.292994Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T17:10:22.293596Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T17:10:22.293797Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:10:22.294581Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:22.294789Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:22.294999Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T17:10:22.319010Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 
9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T17:10:22.319253Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:10:22.320067Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:10:22.320852Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T17:10:22.321003Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T17:10:22.321336Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T17:10:22.321543Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:10:22.321767Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T17:10:22.322095Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T17:10:22.322137Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:10:22.322438Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T17:10:22.322469Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T17:10:22.322506Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T17:10:22.322536Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:10:22.322581Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T17:10:22.322607Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T17:10:22.322635Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T17:10:22.322664Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T17:10:22.322686Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T17:10:22.352562Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T17:10:22.352776Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:10:22.352945Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:10:22.352988Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T17:10:22.353064Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T17:10:22.353758Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:22.353824Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:22.353876Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T17:10:22.353961Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 
2025-04-17T17:10:22.353992Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:10:22.354165Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T17:10:22.354221Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:10:22.354304Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T17:10:22.354349Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T17:10:22.367045Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T17:10:22.367196Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:10:22.367705Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:22.367810Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:22.367922Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:10:22.368013Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:10:22.372149Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T17:10:22.372296Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T17:10:22.372341Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T17:10:22.372390Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:10:22.372423Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T17:10:22.372467Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T17:10:22.372502Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T17:10:22.372678Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T17:10:22.372712Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:10:22.372731Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T17:10:22.372753Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T17:10:22.372771Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T17:10:22.372826Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:10:22.372845Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T17:10:22.372879Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T17:10:22.372908Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T17:10:22.372962Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically 
complete end at 9437184 2025-04-17T17:10:22.373004Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T17:10:22.373034Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T17:10:22.373176Z node 1 :TX_DATA ... 4MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\231\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1002 ExecLevel: 0 Flags: 0 2025-04-17T17:11:17.526064Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:11:17.526119Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:11:17.526742Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit CheckDataTx 2025-04-17T17:11:17.526782Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2025-04-17T17:11:17.526805Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit CheckDataTx 2025-04-17T17:11:17.526829Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T17:11:17.526850Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T17:11:17.526878Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-17T17:11:17.526915Z node 3 :TX_DATASHARD TRACE: Activated operation [0:1002] at 9437184 2025-04-17T17:11:17.526940Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2025-04-17T17:11:17.526956Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-17T17:11:17.526972Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit ExecuteDataTx 2025-04-17T17:11:17.526987Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit ExecuteDataTx 2025-04-17T17:11:17.527014Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-17T17:11:17.527224Z node 3 :TX_DATASHARD TRACE: Executed operation [0:1002] at tablet 9437184 with status COMPLETE 2025-04-17T17:11:17.527261Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:1002] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T17:11:17.527296Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:11:17.527316Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit ExecuteDataTx 
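The GetMvccTxVersion lines print version edges in the form v<step>/<txId>. CompleteEdge# v1000001/1 matches the scheme transaction planned earlier in this trace (txId 1 at step 1000001), while ImmediateWriteEdge# v1000001/18446744073709551615 uses the maximum 64-bit txId, which — assuming versions compare lexicographically by (step, txId) — places immediate writes after every planned transaction of the same step. A small compile-time check of both claims, using a simplified stand-in for the version type:

    #include <cstdint>
    #include <limits>
    #include <tuple>

    // The ImmediateWriteEdge txId printed in the trace is the maximum ui64.
    static_assert(18446744073709551615ULL == std::numeric_limits<uint64_t>::max());

    // Simplified stand-in: versions compare lexicographically by (step, txId).
    struct TVersion {
        uint64_t Step, TxId;
        constexpr bool operator<(const TVersion& rhs) const {
            return std::tie(Step, TxId) < std::tie(rhs.Step, rhs.TxId);
        }
    };

    // v1000001/1 (the planned scheme tx) orders before v1000001/Max
    // (the immediate-write edge) within the same step.
    static_assert(TVersion{1000001, 1} < TVersion{1000001, 18446744073709551615ULL});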
2025-04-17T17:11:17.527336Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit FinishPropose 2025-04-17T17:11:17.527355Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit FinishPropose 2025-04-17T17:11:17.527376Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is DelayComplete 2025-04-17T17:11:17.527394Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit FinishPropose 2025-04-17T17:11:17.527414Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit CompletedOperations 2025-04-17T17:11:17.527434Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit CompletedOperations 2025-04-17T17:11:17.527462Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2025-04-17T17:11:17.527478Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit CompletedOperations 2025-04-17T17:11:17.527495Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:1002] at 9437184 has finished 2025-04-17T17:11:17.549648Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T17:11:17.549711Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:1002] at 9437184 on unit FinishPropose 2025-04-17T17:11:17.549745Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 1002 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-04-17T17:11:17.549804Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 .2025-04-17T17:11:17.555851Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 } 2025-04-17T17:11:17.555910Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-17T17:11:17.556596Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:4542:6461], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:17.556629Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:17.556657Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:4541:6460], serverId# [3:4542:6461], sessionId# [0:0:0] 2025-04-17T17:11:17.556871Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\265\002\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\235\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1003 ExecLevel: 0 Flags: 0 2025-04-17T17:11:17.556900Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:11:17.556968Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:11:17.557311Z 
node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit CheckDataTx 2025-04-17T17:11:17.557348Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2025-04-17T17:11:17.557368Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit CheckDataTx 2025-04-17T17:11:17.557390Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T17:11:17.557410Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T17:11:17.557438Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-17T17:11:17.557476Z node 3 :TX_DATASHARD TRACE: Activated operation [0:1003] at 9437184 2025-04-17T17:11:17.557502Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2025-04-17T17:11:17.557518Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-17T17:11:17.557535Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit ExecuteDataTx 2025-04-17T17:11:17.557550Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit ExecuteDataTx 2025-04-17T17:11:17.557577Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-17T17:11:17.557786Z node 3 :TX_DATASHARD TRACE: Executed operation [0:1003] at tablet 9437184 with status COMPLETE 2025-04-17T17:11:17.557821Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:1003] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T17:11:17.557857Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:11:17.557875Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit ExecuteDataTx 2025-04-17T17:11:17.557898Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit FinishPropose 2025-04-17T17:11:17.557918Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit FinishPropose 2025-04-17T17:11:17.557940Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is DelayComplete 2025-04-17T17:11:17.557958Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit FinishPropose 2025-04-17T17:11:17.557977Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit CompletedOperations 2025-04-17T17:11:17.557998Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit CompletedOperations 2025-04-17T17:11:17.558024Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2025-04-17T17:11:17.558039Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit CompletedOperations 2025-04-17T17:11:17.558055Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:1003] at 9437184 has finished 2025-04-17T17:11:17.694982Z node 3 :TX_DATASHARD DEBUG: 
CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T17:11:17.695034Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2025-04-17T17:11:17.696549Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T17:11:17.696593Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:1003] at 9437184 on unit FinishPropose 2025-04-17T17:11:17.696624Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 1003 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2025-04-17T17:11:17.696678Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:11:17.698792Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:229:2224], Recipient [3:232:2225]: NKikimr::TEvTablet::TEvFollowerGcApplied .2025-04-17T17:11:17.702760Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:4556:6474], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:17.703280Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:17.703542Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:4555:6473], serverId# [3:4556:6474], sessionId# [0:0:0] 2025-04-17T17:11:17.704862Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553160, Sender [3:4554:6472], Recipient [3:232:2225]: NKikimrTxDataShard.TEvGetTableStats TableId: 13 { InMemSize: 0 LastAccessTime: 1717 LastUpdateTime: 1717 } >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:10:57.658912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:10:57.659338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:10:57.659694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:10:57.659914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:10:57.660138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:10:57.660510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:10:57.660894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:10:57.661638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:10:57.662528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:10:58.155995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:10:58.157008Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:58.253846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:10:58.255560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:10:58.258484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:10:58.312650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:10:58.314001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:10:58.319398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:10:58.319553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:10:58.339477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:10:58.347510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:10:58.347786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:10:58.349418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:10:58.349950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:10:58.350169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:10:58.351407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:10:58.377138Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:10:58.755524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:10:58.755698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:58.755836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:10:58.755994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:10:58.756037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:58.771678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose 
Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:10:58.774696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:10:58.776228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:58.776836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:10:58.777277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:10:58.777541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:10:58.795205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:58.795500Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:10:58.795726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:10:58.817799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:58.817852Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:58.818149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:10:58.818330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:10:58.823656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:10:58.829504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:10:58.829679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:10:58.830460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:10:58.830587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:10:58.830627Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:10:58.831093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:10:58.831153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:10:58.831288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:10:58.831385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:10:58.833311Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:10:58.833348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:10:58.833513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:10:58.833550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:10:58.833870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:58.833924Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:10:58.834006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:10:58.834038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:10:58.834073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:10:58.834110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:10:58.834153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:10:58.834187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:10:58.834214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:10:58.834240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:10:58.834289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:10:58.834323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:10:58.834355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:10:58.835895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:10:58.835999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:10:58.836037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
MKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'27))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:21.938012Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2049:3912], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'28))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:21.947889Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2050:3913], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'29))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:21.978629Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2051:3914], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'30))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:22.090146Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2052:3915], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'31))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:22.165352Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2053:3916], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'32))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:22.206769Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2054:3917], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'33))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:22.213714Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2055:3918], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'34))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:22.237513Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2056:3919], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'35))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:22.249496Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2057:3920], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'36))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:22.255943Z node 1 :TX_DATASHARD 
TRACE: StateWork, received event# 268830210, Sender [1:2058:3921], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'37))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:22.262241Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2059:3922], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'38))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:22.271996Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2060:3923], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'39))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:22.278668Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2061:3924], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'40))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:22.284895Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2062:3925], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'41))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:22.291330Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2063:3926], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'42))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:22.306839Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2064:3927], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'43))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:22.322880Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2065:3928], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'44))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:22.334347Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2066:3929], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'45))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:22.341333Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2067:3930], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'46))))\n (let select \'(\'key))\n (return (AsList\n (SetResult 
\'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:22.347947Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2068:3931], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'47))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:22.354661Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2069:3932], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'48))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:22.361703Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2070:3933], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'49))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T17:11:22.368572Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2071:3934], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'50))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] |74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |74.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} |74.5%| [TA] {RESULT} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> GenericFederatedQuery::IcebergHadoopBasicSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectConstant |74.6%| [TA] {RESULT} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |74.6%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TTransferTests::CreateWrongFlushIntervalIsSmall [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsBig |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] >> TTransferTests::CreateDropRecreate [GOOD] >> TTransferTests::ConsistencyLevel |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> GenericFederatedQuery::PostgreSQLOnPremSelectAll [GOOD] >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateCheckerTest::CheckSubjectDns [GOOD] |74.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |74.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |74.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations >> ShowCreateView::WithTablePathPrefix ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:78:2110] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:82:2057] recipient: [4:78:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:81:2111] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:76:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:78:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:79:2110] Leader for TabletID 72057594037927937 is [5:81:2111] sender: [5:82:2057] recipient: [5:79:2110] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:81:2111] Leader for TabletID 72057594037927937 is [5:81:2111] sender: [5:135:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:77:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:81:2057] recipient: [6:79:2110] Leader for TabletID 72057594037927937 is [6:82:2111] sender: [6:83:2057] recipient: [6:79:2110] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:82:2111] Leader for TabletID 72057594037927937 is [6:82:2111] sender: [6:136:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:79:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:82:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:81:2112] Leader for TabletID 72057594037927937 is [7:84:2113] sender: [7:85:2057] recipient: [7:81:2112] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:84:2113] Leader for TabletID 72057594037927937 is [7:84:2113] sender: [7:138:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:79:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:82:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:81:2112] Leader for TabletID 72057594037927937 is [8:84:2113] sender: [8:85:2057] recipient: [8:81:2112] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:84:2113] Leader for TabletID 72057594037927937 is [8:84:2113] sender: [8:138:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:80:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:82:2112] Leader for TabletID 72057594037927937 is [9:85:2113] sender: [9:86:2057] recipient: [9:82:2112] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:85:2113] Leader for TabletID 72057594037927937 is [9:85:2113] sender: [9:139:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:83:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:85:2115] Leader for TabletID 72057594037927937 is [10:88:2116] sender: [10:89:2057] recipient: [10:85:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:88:2116] Leader for TabletID 72057594037927937 is [10:88:2116] sender: [10:142:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:83:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:87:2057] recipient: [11:85:2115] Leader for TabletID 72057594037927937 is [11:88:2116] sender: [11:89:2057] recipient: [11:85:2115] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:88:2116] Leader for TabletID 72057594037927937 is [11:88:2116] sender: [11:142:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:84:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:87:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:88:2057] recipient: [12:86:2115] Leader for TabletID 72057594037927937 is [12:89:2116] sender: [12:90:2057] recipient: [12:86:2115] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! 
new actor is[12:89:2116] Leader for TabletID 72057594037927937 is [12:89:2116] sender: [12:143:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateCheckerTest::CheckSubjectDns [GOOD] >> DataShardWrite::UpsertImmediate [GOOD] >> DataShardWrite::UpsertImmediateManyColumns >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] >> DataShardWrite::UpsertPrepared+Volatile [GOOD] >> DataShardWrite::UpsertPrepared-Volatile >> SystemView::ShowCreateTablePartitionByHash ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! 
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:104:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! 
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:107:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (a ... 594037927937 (actor [23:56:2097]) tablet resolver refreshed! 
new actor is[23:85:2114] Leader for TabletID 72057594037927937 is [23:85:2114] sender: [23:139:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:81:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:83:2113] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:85:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:87:2057] recipient: [24:83:2113] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:86:2114] Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:140:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:84:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:87:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:88:2057] recipient: [25:86:2116] Leader for TabletID 72057594037927937 is [25:89:2117] sender: [25:90:2057] recipient: [25:86:2116] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! new actor is[25:89:2117] Leader for TabletID 72057594037927937 is [25:89:2117] sender: [25:143:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:84:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:87:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:88:2057] recipient: [26:86:2116] Leader for TabletID 72057594037927937 is [26:89:2117] sender: [26:90:2057] recipient: [26:86:2116] !Reboot 72057594037927937 (actor [26:56:2097]) rebooted! !Reboot 72057594037927937 (actor [26:56:2097]) tablet resolver refreshed! 
new actor is[26:89:2117] Leader for TabletID 72057594037927937 is [26:89:2117] sender: [26:143:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:57:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:74:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:85:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:88:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:89:2057] recipient: [27:87:2116] Leader for TabletID 72057594037927937 is [27:90:2117] sender: [27:91:2057] recipient: [27:87:2116] !Reboot 72057594037927937 (actor [27:56:2097]) rebooted! !Reboot 72057594037927937 (actor [27:56:2097]) tablet resolver refreshed! new actor is[27:90:2117] Leader for TabletID 72057594037927937 is [27:90:2117] sender: [27:144:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:57:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:74:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:88:2057] recipient: [28:36:2083] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:91:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:92:2057] recipient: [28:90:2119] Leader for TabletID 72057594037927937 is [28:93:2120] sender: [28:94:2057] recipient: [28:90:2119] !Reboot 72057594037927937 (actor [28:56:2097]) rebooted! !Reboot 72057594037927937 (actor [28:56:2097]) tablet resolver refreshed! new actor is[28:93:2120] Leader for TabletID 72057594037927937 is [28:93:2120] sender: [28:147:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:57:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:74:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:88:2057] recipient: [29:36:2083] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:91:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:92:2057] recipient: [29:90:2119] Leader for TabletID 72057594037927937 is [29:93:2120] sender: [29:94:2057] recipient: [29:90:2119] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! 
new actor is[29:93:2120] Leader for TabletID 72057594037927937 is [29:93:2120] sender: [29:147:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:89:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:92:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:93:2057] recipient: [30:91:2119] Leader for TabletID 72057594037927937 is [30:94:2120] sender: [30:95:2057] recipient: [30:91:2119] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! new actor is[30:94:2120] Leader for TabletID 72057594037927937 is [30:94:2120] sender: [30:148:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:91:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:94:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:95:2057] recipient: [31:93:2121] Leader for TabletID 72057594037927937 is [31:96:2122] sender: [31:97:2057] recipient: [31:93:2121] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! new actor is[31:96:2122] Leader for TabletID 72057594037927937 is [31:96:2122] sender: [31:150:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:91:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:94:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:95:2057] recipient: [32:93:2121] Leader for TabletID 72057594037927937 is [32:96:2122] sender: [32:97:2057] recipient: [32:93:2121] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! 
new actor is[32:96:2122] Leader for TabletID 72057594037927937 is [32:96:2122] sender: [32:150:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:92:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:94:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:96:2057] recipient: [33:95:2121] Leader for TabletID 72057594037927937 is [33:97:2122] sender: [33:98:2057] recipient: [33:95:2121] !Reboot 72057594037927937 (actor [33:56:2097]) rebooted! !Reboot 72057594037927937 (actor [33:56:2097]) tablet resolver refreshed! new actor is[33:97:2122] Leader for TabletID 72057594037927937 is [33:97:2122] sender: [33:151:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestGetStatusWorks >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates >> SystemView::ShowCreateTableDefaultLiteral >> TPQCachingProxyTest::TestWrongSessionOrGeneration >> DataShardWrite::WriteImmediateBadRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:10:58.835072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:10:58.835153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:10:58.835187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:10:58.835237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:10:58.835275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:10:58.835304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:10:58.835368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:10:58.835443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:10:58.835754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:10:59.461287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:10:59.461333Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:59.530984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:10:59.532993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:10:59.533351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:10:59.613755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:10:59.615407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:10:59.624708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:10:59.625174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:10:59.666269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:10:59.679397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:10:59.679870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:10:59.680850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:10:59.681353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:10:59.681592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:10:59.683086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:10:59.772980Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:11:00.462740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:11:00.464457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:00.466200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:11:00.485335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:11:00.486063Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:00.512634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:11:00.514737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:11:00.515938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:00.516599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:11:00.516847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:11:00.517316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:11:00.532007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:00.532064Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:11:00.532303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:11:00.538408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:00.538461Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:00.538500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:11:00.538558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:11:00.553094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:11:00.566770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:11:00.568199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:11:00.579202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:11:00.579996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:11:00.580438Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:11:00.582597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:11:00.582815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:11:00.584424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:11:00.585166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:11:00.598895Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:11:00.599376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:11:00.600359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:11:00.600965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:11:00.602651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:00.603205Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:11:00.604222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:11:00.604856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:11:00.605294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:11:00.605973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:11:00.606225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:11:00.606533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:11:00.606762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:11:00.606979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:11:00.607255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:11:00.607697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:11:00.607928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:11:00.618001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:11:00.618891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:11:00.619350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:11:26.892961Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:11:26.893865Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:11:26.896354Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:11:26.897170Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 25769805931 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:11:26.897694Z node 6 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:11:26.899190Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:11:26.899458Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:11:26.912963Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:11:26.914116Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:11:26.930006Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:11:26.930307Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:11:26.931505Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:11:26.931793Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [6:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:11:26.937086Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:26.937664Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:11:26.938481Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:11:26.938996Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:11:26.939534Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:11:26.939802Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:11:26.940065Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 
2025-04-17T17:11:26.945650Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:11:26.945946Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:11:26.946218Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:11:26.946521Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:11:26.947077Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:11:26.947358Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:11:26.949349Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:11:26.950142Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:11:26.950399Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T17:11:26.950713Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T17:11:26.956329Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:11:26.957701Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T17:11:26.975793Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T17:11:26.977616Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-17T17:11:26.986857Z node 6 :TX_PROXY DEBUG: actor# [6:271:2262] Bootstrap 2025-04-17T17:11:27.335340Z node 6 :TX_PROXY DEBUG: actor# [6:271:2262] Become StateWork (SchemeCache [6:276:2267]) 2025-04-17T17:11:27.336670Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [6:275:2266], Recipient [6:125:2151]: {TEvModifySchemeTransaction txid# 101 TabletId# 72057594046678944} 2025-04-17T17:11:27.336741Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T17:11:27.345215Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTransfer Replication { Name: "Transfer" Config { TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } Batching { FlushIntervalMilliSeconds: 86400001 } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:11:27.346613Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateReplication Propose: opId# 101:0, path# /MyRoot/Transfer 2025-04-17T17:11:27.347210Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, 
at schemeshard: 72057594046678944 2025-04-17T17:11:27.348154Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:11:27.350350Z node 6 :TX_PROXY DEBUG: actor# [6:271:2262] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:11:27.373068Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Flush interval must be less than or equal to 24 hours" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:11:27.374507Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, operation: CREATE TRANSFER, path: /MyRoot/Transfer 2025-04-17T17:11:27.374799Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:11:27.377083Z node 6 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T17:11:27.378441Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T17:11:27.378710Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T17:11:27.384200Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [6:288:2279], Recipient [6:125:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:27.384487Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:27.384782Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-17T17:11:27.385677Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [6:285:2276], Recipient [6:125:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 101 2025-04-17T17:11:27.385705Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-17T17:11:27.386269Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T17:11:27.386611Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T17:11:27.386923Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [6:286:2277] 2025-04-17T17:11:27.388012Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:288:2279], Recipient [6:125:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:11:27.396281Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:11:27.396565Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-04-17T17:11:27.398065Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [6:289:2280], Recipient [6:125:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-17T17:11:27.398345Z node 6 :FLAT_TX_SCHEMESHARD 
TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-17T17:11:27.399155Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:11:27.407053Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Transfer" took 1.39ms result status StatusPathDoesNotExist 2025-04-17T17:11:27.407776Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Transfer\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Transfer" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> DataShardWrite::WriteImmediateSeveralOperations >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::OutdatedSession >> SystemView::PartitionStatsOneSchemeShard |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] >> GroupWriteTest::Simple |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken [GOOD] |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> ColumnBuildTest::CancelBuild >> TPQCachingProxyTest::OutdatedSession [GOOD] >> TTransferTests::ConsistencyLevel [GOOD] >> TTransferTests::Alter >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite |74.6%| [TA] $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPQCachingProxyTest::TestPublishAndForget |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> GroupWriteTest::WriteHardRateDispatcher |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::OutdatedSession [GOOD] Test command err: 2025-04-17T17:11:29.597302Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:11:29.597414Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:11:29.616250Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:11:29.616350Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-04-17T17:11:29.616433Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-04-17T17:11:29.616482Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-04-17T17:11:29.616570Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 2, killed existing session with older generation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] Test command err: 2025-04-17T17:11:29.506923Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:11:29.507013Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:11:29.521468Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:11:29.521537Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 2 2025-04-17T17:11:29.521592Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-04-17T17:11:29.521625Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 2 2025-04-17T17:11:29.521687Z node 1 :PQ_READ_PROXY INFO: Direct read cache: attempted to register server session: session1:1 with stale generation 1, ignored 2025-04-17T17:11:29.521758Z node 1 :PQ_READ_PROXY ALERT: Direct read cache: tried to stage direct read for session session1 with generation 1, previously had this session with generation 2. Data ignored 2025-04-17T17:11:29.521808Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-04-17T17:11:29.521871Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: forget read: 1 for session session1 |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |74.7%| [TA] {RESULT} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |74.7%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken [GOOD] Test command err: Trying to start YDB, gRPC: 10996, MsgBus: 11909 2025-04-17T17:10:46.991831Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494330725628552531:2178];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:10:46.993040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0025ef/r3tmp/tmplSOwNW/pdisk_1.dat 2025-04-17T17:10:47.363973Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:47.378690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:10:47.378789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:10:47.381788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10996, node 1 2025-04-17T17:10:47.470281Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:10:47.470306Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:10:47.470311Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:10:47.470402Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11909 TClient is connected to server localhost:11909 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:10:48.064207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:10:51.990884Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494330725628552531:2178];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:10:51.991120Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:10:53.207303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330755693324157:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:53.207395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:53.682260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:10:53.987106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330755693324280:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:53.988027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:53.989805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330755693324285:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:54.004239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330759988291610:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:54.004281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:10:54.004297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:54.004547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330759988291612:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:54.069510Z node 1 :TX_PROXY ERROR: Actor# [1:7494330759988291617:2396] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T17:10:54.086612Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-17T17:10:54.089018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494330755693324287:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:10:54.089063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494330759988291615:2360], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:10:54.172638Z node 1 :TX_PROXY ERROR: Actor# [1:7494330759988291644:2420] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:10:54.194164Z node 1 :TX_PROXY ERROR: Actor# [1:7494330759988291665:2430] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:10:55.993767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T17:10:56.655974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:1, at schemeshard: 72057594046644480 2025-04-17T17:10:57.355637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T17:10:58.833370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T17:11:00.748534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T17:11:01.880439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:11:02.161624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:11:02.360995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:02.361016Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:04.896546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710708:0, at schemeshard: 72057594046644480 2025-04-17T17:11:04.903610Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2bprgndw1rh6b8r68bh5gh", SessionId: ydb://session/3?node_id=1&id=N2IxMjU5NGUtMzM0MWM5MDctODUxMjM2YWUtOTBhZDBiYTg=, Slow query, duration: 11.696363s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE OBJECT external_data_source_password (TYPE SECRET) WITH (value=qwerty12345);\n\n CREATE EXTERNAL DATA SOURCE external_data_source WITH (\n SOURCE_TYPE=\"Ydb\",\n LOCATION=\"localhost:2136\",\n AUTH_METHOD=\"BASIC\",\n LOGIN=\"crab\",\n DATABASE_NAME=\"pgdb\",\n PASSWORD_SECRET_NAME=\"external_data_source_password\",\n USE_TLS=\"TRUE\"\n );\n ", parameters: 0b Call DescribeTable. 
data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 Trying to start YDB, gRPC: 23163, MsgBus: 2174 2025-04-17T17:11:07.349495Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494330814584065255:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:07.349787Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0025ef/r3tmp/tmpZJIX6j/pdisk_1.dat 2025-04-17T17:11:08.879981Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:09.037013Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:11:09.088399Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:09.100219Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:09.111533Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23163, node 2 2025-04-17T17:11:09.800408Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:09.800429Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:09.800436Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:09.800743Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2174 2025-04-17T17:11:12.336920Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494330814584065255:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:12.336968Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:2174 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:11:13.812716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
Trying to start YDB, gRPC: 5887, MsgBus: 1485 2025-04-17T17:11:22.512185Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494330879779215948:2215];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0025ef/r3tmp/tmpfdxeli/pdisk_1.dat 2025-04-17T17:11:22.563353Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:11:22.654154Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:22.681563Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:22.681645Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 5887, node 3 2025-04-17T17:11:22.685799Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:11:23.241068Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:23.241087Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:23.241092Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:23.241192Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1485 TClient is connected to server localhost:1485 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:11:24.912181Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:11:24.918268Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 >> TPQCachingProxyTest::TestPublishAndForget [GOOD] |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |74.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |74.7%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut >> SystemView::VSlotsFields [GOOD] >> SystemView::TopPartitionsByCpuTables |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |74.7%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut >> TTransferTests::Alter [GOOD] |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestPublishAndForget [GOOD] Test command err: 2025-04-17T17:11:30.837808Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:11:30.837907Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:11:30.855311Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:11:30.855436Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-04-17T17:11:30.855543Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-04-17T17:11:30.855585Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-04-17T17:11:30.855711Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: forget read: 1 for session session1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::Alter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:10:57.655513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:10:57.656221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:10:57.656604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 
2025-04-17T17:10:57.659530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:10:57.659780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:10:57.660017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:10:57.660963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:10:57.661364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:10:57.663515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:10:58.180980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:10:58.181203Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:58.312698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:10:58.314559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:10:58.315087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:10:58.437154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:10:58.440010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:10:58.442355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:10:58.442594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:10:58.459257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:10:58.460412Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:10:58.460468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:10:58.460603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:10:58.460641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:10:58.460670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:10:58.460779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:10:58.539914Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:10:59.081599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:10:59.083032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, 
path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:59.084399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:10:59.084629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:10:59.084676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:59.092477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:10:59.094430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:10:59.095908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:59.099165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:10:59.099411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:10:59.099882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:10:59.113940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:59.113992Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:10:59.114214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:10:59.128431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:59.128680Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:59.128916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:10:59.129381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:10:59.151354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:10:59.175606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:10:59.177135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:10:59.201642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at 
schemeshard: 72057594046678944 2025-04-17T17:10:59.201787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:10:59.202624Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:10:59.204473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:10:59.204729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:10:59.206486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:10:59.207831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:10:59.221843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:10:59.222308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:10:59.223805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:10:59.224039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:10:59.226278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:10:59.226548Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:10:59.227696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:10:59.228666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:10:59.228960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:10:59.229179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:10:59.229440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:10:59.230133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:10:59.230387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:10:59.230640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:10:59.231168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:10:59.231419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:10:59.231690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:10:59.243046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:10:59.244032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:10:59.244863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... Id, TxId: 103, partId: 0, tablet: 72075186233409546 2025-04-17T17:11:31.154079Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186233409546] Handle NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046678944 LocalId: 2 } OperationId { TxId: 103 PartId: 0 } SwitchState { StandBy { } } Config { SrcConnectionParams { StaticCredentials { User: "user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } 2025-04-17T17:11:31.154361Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186233409546][TxAlterReplication] Execute: NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046678944 LocalId: 2 } OperationId { TxId: 103 PartId: 0 } SwitchState { StandBy { } } Config { SrcConnectionParams { StaticCredentials { User: "user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } 2025-04-17T17:11:31.154500Z node 6 :REPLICATION_CONTROLLER NOTICE: [controller 72075186233409546][TxAlterReplication] Alter replication: rid# 1, pathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:11:31.155926Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186233409546][TxAlterReplication] Complete 2025-04-17T17:11:31.156151Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 276758531, Sender [6:309:2296], Recipient [6:125:2151]: NKikimrReplication.TEvAlterReplicationResult OperationId { TxId: 103 PartId: 0 } Origin: 72075186233409546 Status: SUCCESS 2025-04-17T17:11:31.156194Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NReplication::TEvController::TEvAlterReplicationResult 2025-04-17T17:11:31.156287Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvAlterReplicationResult, at schemeshard: 72057594046678944, message: OperationId { TxId: 103 PartId: 0 } Origin: 72075186233409546 Status: SUCCESS 2025-04-17T17:11:31.156482Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: OperationId { TxId: 103 PartId: 0 } Origin: 72075186233409546 Status: SUCCESS 2025-04-17T17:11:31.156586Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterReplication TConfigureParts opId# 103:0 HandleReply NKikimrReplication.TEvAlterReplicationResult OperationId { TxId: 103 PartId: 0 } Origin: 72075186233409546 Status: SUCCESS 2025-04-17T17:11:31.156637Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 3 -> 128 2025-04-17T17:11:31.156745Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:11:31.156805Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Ack tablet strongly msg opId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:2 2025-04-17T17:11:31.158338Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:11:31.158395Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:11:31.158442Z node 
6 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 103:0 2025-04-17T17:11:31.158578Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [6:125:2151], Recipient [6:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:11:31.158615Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:11:31.158661Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:11:31.158703Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterReplication TPropose opId# 103:0 ProgressState 2025-04-17T17:11:31.158749Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:11:31.158796Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-04-17T17:11:31.158947Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:11:31.160233Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:11:31.160295Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-17T17:11:31.160398Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-04-17T17:11:31.160673Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [6:132:2155], Recipient [6:259:2250] 2025-04-17T17:11:31.160738Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:11:31.160827Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:11:31.160960Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 25769805931 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:11:31.161018Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterReplication TPropose opId# 103:0 HandleReply TEvOperationPlan: step# 5000004 2025-04-17T17:11:31.161152Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-04-17T17:11:31.161361Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:11:31.161466Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:11:31.161545Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Ack tablet strongly msg opId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 2025-04-17T17:11:31.163257Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:11:31.163309Z 
node 6 :FLAT_TX_SCHEMESHARD TRACE: Ack coordinator stepId#5000004 first txId#103 countTxs#1 2025-04-17T17:11:31.163367Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Ack mediator stepId#5000004 2025-04-17T17:11:31.163406Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 103:0 2025-04-17T17:11:31.163577Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [6:125:2151], Recipient [6:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:11:31.163611Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 103 2025-04-17T17:11:31.163699Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:11:31.163747Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:11:31.163986Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:11:31.164037Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [6:208:2210], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-17T17:11:31.164486Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:11:31.164541Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-17T17:11:31.164669Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:11:31.164723Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:11:31.164769Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:11:31.164820Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:11:31.164863Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:11:31.164912Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-17T17:11:31.164967Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:11:31.165017Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T17:11:31.165055Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T17:11:31.165188Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:11:31.165238Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-04-17T17:11:31.165285Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2025-04-17T17:11:31.165989Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [6:208:2210], Recipient [6:125:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 4 } 2025-04-17T17:11:31.166039Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-17T17:11:31.166131Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:11:31.166247Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:11:31.166291Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:11:31.166342Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-17T17:11:31.166387Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:11:31.166495Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-17T17:11:31.166541Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:11:31.169675Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:11:31.169948Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:11:31.170006Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 >> SystemView::CollectPreparedQueries [GOOD] >> SystemView::AuthUsers >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy [GOOD] >> TTxDataShardMiniKQL::CrossShard_3_AllToOne >> DataShardWrite::UpsertImmediateManyColumns [GOOD] >> DataShardWrite::ReplaceImmediate >> TKeyValueTest::TestConcatWorksNewApi [GOOD] >> TKeyValueTest::TestConcatToLongKey |74.7%| [TA] $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |74.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> TTxDataShardMiniKQL::CrossShard_5_AllToAll [GOOD] >> TTxDataShardMiniKQL::CrossShard_6_Local >> DataShardWrite::UpsertPrepared-Volatile [GOOD] >> DataShardWrite::UpsertNoLocksArbiter >> KqpBatchUpdate::SimpleOnePartition >> KqpBatchUpdate::NotIdempotent |74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |74.8%| [TA] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |74.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... 
results_accumulator.log} |74.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> GenericFederatedQuery::YdbManagedSelectConstant [GOOD] >> GenericFederatedQuery::YdbSelectCount >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite [GOOD] >> DataShardWrite::DeleteImmediate >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile >> GenericFederatedQuery::IcebergHiveBasicSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveBasicSelectCount >> KqpBatchDelete::MultiStatement >> KqpBatchDelete::Large_2 >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectCount >> ResourcePoolsDdl::TestCreateResourcePool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD] Test command err: RandomSeed# 16887494862263727242 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... boot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! 
new actor is[22:85:2114] Leader for TabletID 72057594037927937 is [22:85:2114] sender: [22:139:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:80:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:83:2057] recipient: [23:82:2113] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:84:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:85:2114] sender: [23:86:2057] recipient: [23:82:2113] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:85:2114] Leader for TabletID 72057594037927937 is [23:85:2114] sender: [23:139:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:81:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:83:2113] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:85:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:87:2057] recipient: [24:83:2113] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:86:2114] Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:104:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:83:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:86:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:87:2057] recipient: [25:85:2115] Leader for TabletID 72057594037927937 is [25:88:2116] sender: [25:89:2057] recipient: [25:85:2115] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! 
new actor is[25:88:2116] Leader for TabletID 72057594037927937 is [25:88:2116] sender: [25:142:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:83:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:86:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:87:2057] recipient: [26:85:2115] Leader for TabletID 72057594037927937 is [26:88:2116] sender: [26:89:2057] recipient: [26:85:2115] !Reboot 72057594037927937 (actor [26:56:2097]) rebooted! !Reboot 72057594037927937 (actor [26:56:2097]) tablet resolver refreshed! new actor is[26:88:2116] Leader for TabletID 72057594037927937 is [26:88:2116] sender: [26:142:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:57:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:74:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:84:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:87:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:88:2057] recipient: [27:86:2115] Leader for TabletID 72057594037927937 is [27:89:2116] sender: [27:90:2057] recipient: [27:86:2115] !Reboot 72057594037927937 (actor [27:56:2097]) rebooted! !Reboot 72057594037927937 (actor [27:56:2097]) tablet resolver refreshed! new actor is[27:89:2116] Leader for TabletID 72057594037927937 is [27:89:2116] sender: [27:107:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:57:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:74:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:86:2057] recipient: [28:36:2083] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:88:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:90:2057] recipient: [28:89:2117] Leader for TabletID 72057594037927937 is [28:91:2118] sender: [28:92:2057] recipient: [28:89:2117] !Reboot 72057594037927937 (actor [28:56:2097]) rebooted! !Reboot 72057594037927937 (actor [28:56:2097]) tablet resolver refreshed! 
new actor is[28:91:2118] Leader for TabletID 72057594037927937 is [28:91:2118] sender: [28:145:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:57:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:74:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:86:2057] recipient: [29:36:2083] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:89:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:90:2057] recipient: [29:88:2117] Leader for TabletID 72057594037927937 is [29:91:2118] sender: [29:92:2057] recipient: [29:88:2117] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! new actor is[29:91:2118] Leader for TabletID 72057594037927937 is [29:91:2118] sender: [29:145:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:87:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:90:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:91:2057] recipient: [30:89:2117] Leader for TabletID 72057594037927937 is [30:92:2118] sender: [30:93:2057] recipient: [30:89:2117] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! new actor is[30:92:2118] Leader for TabletID 72057594037927937 is [30:92:2118] sender: [30:146:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:90:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:92:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:94:2057] recipient: [31:93:2120] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:96:2057] recipient: [31:93:2120] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! 
new actor is[31:95:2121] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:149:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:90:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:93:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:94:2057] recipient: [32:92:2120] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:96:2057] recipient: [32:92:2120] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! new actor is[32:95:2121] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:149:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant [GOOD] >> GenericFederatedQuery::PostgreSQLSelectCount >> KqpBatchDelete::SimplePartitions >> GenericFederatedQuery::IcebergHiveSaSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectCount |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> GenericFederatedQuery::IcebergHiveTokenSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectCount |74.8%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardWrite::ReplaceImmediate [GOOD] >> DataShardWrite::ReplaceImmediate_DefaultValue >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] >> DataShardWrite::WriteImmediateSeveralOperations [GOOD] >> DataShardWrite::UpsertPreparedManyTables+Volatile >> BasicStatistics::TwoDatabases >> GenericFederatedQuery::IcebergHadoopTokenSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectCount |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchServerlessStatistics ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... TabletID 72057594037927937 is [13:56:2097] sender: [13:90:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:91:2057] recipient: [13:89:2117] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:93:2057] recipient: [13:89:2117] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! 
new actor is[13:92:2118] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:146:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:76:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:79:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:78:2110] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:82:2057] recipient: [16:78:2110] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:81:2111] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:135:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:76:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:78:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:79:2110] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:82:2057] recipient: [17:79:2110] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! 
new actor is[17:81:2111] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:135:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:77:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:80:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:79:2110] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:83:2057] recipient: [18:79:2110] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:82:2111] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:136:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:80:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:82:2113] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:84:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:86:2057] recipient: [19:82:2113] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! new actor is[19:85:2114] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:80:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:82:2113] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:84:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:86:2057] recipient: [20:82:2113] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! 
new actor is[20:85:2114] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:139:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:81:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:85:2057] recipient: [21:83:2113] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:87:2057] recipient: [21:83:2113] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:86:2114] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:140:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:84:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:87:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:88:2057] recipient: [22:86:2116] Leader for TabletID 72057594037927937 is [22:89:2117] sender: [22:90:2057] recipient: [22:86:2116] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! new actor is[22:89:2117] Leader for TabletID 72057594037927937 is [22:89:2117] sender: [22:143:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:84:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:87:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:88:2057] recipient: [23:86:2116] Leader for TabletID 72057594037927937 is [23:89:2117] sender: [23:90:2057] recipient: [23:86:2116] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! 
new actor is[23:89:2117] Leader for TabletID 72057594037927937 is [23:89:2117] sender: [23:143:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:85:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:88:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:89:2057] recipient: [24:87:2116] Leader for TabletID 72057594037927937 is [24:90:2117] sender: [24:91:2057] recipient: [24:87:2116] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:90:2117] Leader for TabletID 72057594037927937 is [24:90:2117] sender: [24:144:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] >> ColumnBuildTest::CancelBuild [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectCount >> DataShardWrite::UpsertNoLocksArbiter [GOOD] >> DataShardWrite::UpsertLostPrepareArbiter |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TKeyValueTest::TestGetStatusWorks [GOOD] >> ShowCreateView::WithTablePathPrefix [FAIL] >> ShowCreateView::WithSingleQuotedTablePathPrefix |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::CancelBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:11:30.207960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:11:30.208050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:11:30.208089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:11:30.208140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:11:30.208189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2025-04-17T17:11:30.208243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:11:30.208317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:11:30.208385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:11:30.208706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:11:30.313231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:11:30.313288Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:30.341334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:11:30.341860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:11:30.341994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:11:30.360610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:11:30.360793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:11:30.361460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:11:30.361681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:11:30.364782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:11:30.366028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:11:30.366101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:11:30.366290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:11:30.366348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:11:30.366392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:11:30.366515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:11:30.384403Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:11:30.582052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:11:30.582291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:30.582491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 0 2025-04-17T17:11:30.582721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:11:30.582768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:30.587177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:11:30.587329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:11:30.587515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:30.587573Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:11:30.587611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:11:30.587648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:11:30.594231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:30.594333Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:11:30.594384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:11:30.603880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:30.603958Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:30.604007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:11:30.604057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:11:30.608819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:11:30.619603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:11:30.619846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:11:30.620792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:11:30.620963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 
4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:11:30.621014Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:11:30.621283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:11:30.621338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:11:30.621520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:11:30.621651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:11:30.625565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:11:30.625646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:11:30.625865Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:11:30.625918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:11:30.626334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:30.626398Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:11:30.626508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:11:30.626544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:11:30.626589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:11:30.626641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:11:30.626683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:11:30.626740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:11:30.626785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:11:30.626821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:11:30.626898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:11:30.626953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:11:30.626990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:11:30.635485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:11:30.635658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:11:30.635708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T17:11:41.753628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-04-17T17:11:41.753753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:11:41.753929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-04-17T17:11:41.753960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-04-17T17:11:41.753990Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000007 2025-04-17T17:11:41.754191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:11:41.754272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:11:41.754311Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710761:0 HandleReply TEvOperationPlan: step# 5000007 2025-04-17T17:11:41.754369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710761:0 128 -> 240 2025-04-17T17:11:41.756255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710761:0, at schemeshard: 72057594046678944 2025-04-17T17:11:41.756299Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710761:0 ProgressState 2025-04-17T17:11:41.756371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-04-17T17:11:41.756396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-17T17:11:41.756430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-04-17T17:11:41.756454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-17T17:11:41.756480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: true 2025-04-17T17:11:41.756527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:124:2150] message: TxId: 281474976710761 2025-04-17T17:11:41.756560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-17T17:11:41.756723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 
281474976710761:0 2025-04-17T17:11:41.756762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-04-17T17:11:41.756825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 12 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-04-17T17:11:41.759431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-04-17T17:11:41.759485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-04-17T17:11:41.759539Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710761, buildInfoId: 102 2025-04-17T17:11:41.759613Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710761, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1171:3024], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T17:11:41.761154Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-17T17:11:41.761220Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1171:3024], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T17:11:41.761260Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2025-04-17T17:11:41.762740Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-17T17:11:41.762806Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], 
TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancelled, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1171:3024], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T17:11:41.762923Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-17T17:11:41.763067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:11:41.763126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1195:3048] TestWaitNotification: OK eventTxId 102 2025-04-17T17:11:41.765445Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-04-17T17:11:41.765713Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 } 2025-04-17T17:11:41.767668Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:11:41.767870Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 222us result status StatusSuccess 2025-04-17T17:11:41.768287Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: 
"value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "DefaultValue" Type: "Uint64" TypeId: 4 Id: 4 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 10 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::ServerlessGlobalIndex >> HttpRequest::ProbeServerless |74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |74.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |74.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |74.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... or TabletID 72057594037927937 is [29:56:2097] sender: [29:89:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:90:2057] recipient: [29:88:2117] Leader for TabletID 72057594037927937 is [29:91:2118] sender: [29:92:2057] recipient: [29:88:2117] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! new actor is[29:91:2118] Leader for TabletID 72057594037927937 is [29:91:2118] sender: [29:145:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:87:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:90:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:91:2057] recipient: [30:89:2117] Leader for TabletID 72057594037927937 is [30:92:2118] sender: [30:93:2057] recipient: [30:89:2117] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! new actor is[30:92:2118] Leader for TabletID 72057594037927937 is [30:92:2118] sender: [30:146:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:90:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:92:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:94:2057] recipient: [31:93:2120] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:96:2057] recipient: [31:93:2120] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! new actor is[31:95:2121] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:149:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:90:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:93:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:94:2057] recipient: [32:92:2120] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:96:2057] recipient: [32:92:2120] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! new actor is[32:95:2121] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:149:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:57:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:74:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:76:2057] recipient: [35:36:2083] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:79:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:80:2057] recipient: [35:78:2110] Leader for TabletID 72057594037927937 is [35:81:2111] sender: [35:82:2057] recipient: [35:78:2110] !Reboot 72057594037927937 (actor [35:56:2097]) rebooted! 
!Reboot 72057594037927937 (actor [35:56:2097]) tablet resolver refreshed! new actor is[35:81:2111] Leader for TabletID 72057594037927937 is [35:81:2111] sender: [35:135:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:57:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:74:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:76:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:79:2057] recipient: [36:78:2110] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:80:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:81:2111] sender: [36:82:2057] recipient: [36:78:2110] !Reboot 72057594037927937 (actor [36:56:2097]) rebooted! !Reboot 72057594037927937 (actor [36:56:2097]) tablet resolver refreshed! new actor is[36:81:2111] Leader for TabletID 72057594037927937 is [36:81:2111] sender: [36:135:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:57:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:74:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:77:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:80:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:81:2057] recipient: [37:79:2110] Leader for TabletID 72057594037927937 is [37:82:2111] sender: [37:83:2057] recipient: [37:79:2110] !Reboot 72057594037927937 (actor [37:56:2097]) rebooted! !Reboot 72057594037927937 (actor [37:56:2097]) tablet resolver refreshed! new actor is[37:82:2111] Leader for TabletID 72057594037927937 is [37:82:2111] sender: [37:136:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:57:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:74:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:79:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:82:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:83:2057] recipient: [38:81:2112] Leader for TabletID 72057594037927937 is [38:84:2113] sender: [38:85:2057] recipient: [38:81:2112] !Reboot 72057594037927937 (actor [38:56:2097]) rebooted! !Reboot 72057594037927937 (actor [38:56:2097]) tablet resolver refreshed! 
new actor is[38:84:2113] Leader for TabletID 72057594037927937 is [38:84:2113] sender: [38:138:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:57:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:74:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:79:2057] recipient: [39:36:2083] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:81:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:83:2057] recipient: [39:82:2112] Leader for TabletID 72057594037927937 is [39:84:2113] sender: [39:85:2057] recipient: [39:82:2112] !Reboot 72057594037927937 (actor [39:56:2097]) rebooted! !Reboot 72057594037927937 (actor [39:56:2097]) tablet resolver refreshed! new actor is[39:84:2113] Leader for TabletID 72057594037927937 is [39:84:2113] sender: [39:138:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:52:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:57:2057] recipient: [40:52:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:74:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:80:2057] recipient: [40:36:2083] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:83:2057] recipient: [40:82:2112] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:84:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [40:85:2113] sender: [40:86:2057] recipient: [40:82:2112] !Reboot 72057594037927937 (actor [40:56:2097]) rebooted! !Reboot 72057594037927937 (actor [40:56:2097]) tablet resolver refreshed! new actor is[40:85:2113] Leader for TabletID 72057594037927937 is [40:85:2113] sender: [40:139:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:54:2057] recipient: [41:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:54:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:57:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:74:2057] recipient: [41:14:2061] >> DataShardWrite::DeleteImmediate [GOOD] >> DataShardWrite::CancelImmediate ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseManagedSelectConstant 2025-04-17 17:11:38,476 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 17:11:38,640 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. 
Process tree before termination: pid rss ref pdirt 21092 46.0M 46.0M 23.0M test_tool run_ut @/home/runner/.ya/build/build_root/6vg8/00263b/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test 21645 1.4G 1.4G 969M └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/6vg8/00263b/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unitt Test command err: Trying to start YDB, gRPC: 26688, MsgBus: 9339 2025-04-17T17:10:43.215541Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494330711746875610:2212];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00263b/r3tmp/tmpbPucLD/pdisk_1.dat 2025-04-17T17:10:45.367623Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:10:46.588484Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:10:46.669312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:10:46.670590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:10:46.674159Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:46.677010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26688, node 1 2025-04-17T17:10:46.772960Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:10:46.772978Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:10:46.772983Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:10:46.773137Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9339 TClient is connected to server localhost:9339 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:10:47.339370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:10:48.116317Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494330711746875610:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:10:48.116434Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:10:50.447812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330741811647188:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:50.947376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:51.639310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:10:51.805036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330746106614612:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:51.805095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:51.805262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330746106614617:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:51.819384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:10:51.893583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494330746106614619:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:10:51.965942Z node 1 :TX_PROXY ERROR: Actor# [1:7494330746106614661:2411] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:10:53.714009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:10:54.340829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T17:10:54.934546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T17:10:55.766133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T17:10:57.017144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T17:10:58.534435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:10:58.646751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:11:01.656726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:01.656945Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:04.461496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710721:0, at schemeshard: 72057594046644480 2025-04-17T17:11:04.469091Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2bpnk1easbbfdmjxj74t9s", SessionId: ydb://session/3?node_id=1&id=OTgyOTllNTMtMjVlMGRlYWQtN2FkN2E3MjQtYjgwMmZiZjM=, Slow query, duration: 14.244717s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE OBJECT sa_signature (TYPE SECRET) WITH (value=sa_signature);\n CREATE OBJECT external_data_source_password (TYPE SECRET) WITH (value=qwerty12345);\n\n CREATE EXTERNAL DATA SOURCE external_data_source WITH (\n SOURCE_TYPE=\"ClickHouse\",\n MDB_CLUSTER_ID=\"ch-managed\",\n AUTH_METHOD=\"MDB_BASIC\",\n SERVICE_ACCOUNT_ID=\"sa\",\n SERVICE_ACCOUNT_SECRET_NAME=\"sa_signature\",\n LOGIN=\"crab\",\n PASSWORD_SECRET_NAME=\"external_data_source_password\",\n USE_TLS=\"TRUE\",\n PROTOCOL=\"HTTP\",\n DATABASE_NAME=\"pgdb\"\n );\n ", parameters: 0b 2025-04-17T17:11:04.493828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710722:0, at schemeshard: 72057594046644480 2025-04-17T17:11:04.495273Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710723:0, at schemeshard: 72057594046644480 2025-04-17T17:11:04.497408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710724:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgd ... -04-17T17:11:25.192994Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:14644 TClient is connected to server localhost:14644 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
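The paired "CRAB Expected" / "CRAB Actual" dumps above are how this unittest checks the wire protocol: the DescribeTable, ListSplits and ReadSplits requests the query engine actually sent to the connector are rendered in protobuf text format and compared against golden values. A minimal sketch of that comparison idiom in Python (the function name is illustrative and the whitespace normalization is an assumption to tolerate wrapping differences; the real check lives in the C++ test, not here):

    def assert_connector_request(expected_text: str, actual_text: str) -> None:
        # Both sides are protobuf text-format dumps, as in the CRAB blocks above.
        def normalize(s: str) -> str:
            return " ".join(s.split())
        if normalize(expected_text) != normalize(actual_text):
            raise AssertionError(
                "connector request mismatch:\n"
                f"expected: {expected_text}\n"
                f"actual:   {actual_text}"
            )

A mismatch would fail the test before any data is read; in the runs above Expected and Actual are identical, so the flow proceeds through ListSplits and ReadSplits.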
2025-04-17T17:11:27.831964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:27.847674Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:11:29.843528Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494330909931816580:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:29.843610Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:29.864021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:11:29.924501Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494330909931816699:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:29.924741Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:29.925112Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494330909931816705:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:29.928593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:11:29.944311Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-17T17:11:29.944490Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494330909931816707:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:11:30.028205Z node 2 :TX_PROXY ERROR: Actor# [2:7494330914226784069:2421] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:11:30.692179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:11:31.244049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480 2025-04-17T17:11:31.806670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T17:11:32.304902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T17:11:32.765684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T17:11:33.273097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:11:33.366102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:11:36.951789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:36.951815Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:37.929577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710729:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. 
GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6vg8/00263b/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6vg8/00263b/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveBasicSelectCount 2025-04-17 17:11:40,266 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 17:11:40,694 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. 
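Both GenericFederatedQuery chunks above fail the same way: the test binary keeps running, but the yatest wrapper's 60-second budget expires, wait_for raises TimeoutError, and run_test re-raises it as ExecutionTimeoutError after dumping the process tree. A minimal sketch of that wait-with-timeout pattern using only the Python standard library (the function and constant names are illustrative, not yatest's actual internals):

    import subprocess

    TIMEOUT_SECONDS = 60  # the per-chunk budget behind the "overrun 60 secs timeout" warnings

    def run_with_timeout(cmd: list[str]) -> int:
        # Launch the test binary and wait at most TIMEOUT_SECONDS for it to exit.
        proc = subprocess.Popen(cmd)
        try:
            return proc.wait(timeout=TIMEOUT_SECONDS)
        except subprocess.TimeoutExpired:
            # The real wrapper prints the process tree and terminates the whole
            # tree; kill() here only signals the direct child.
            proc.kill()
            proc.wait()
            raise

Re-raising keeps the expiry visible to the caller, which is why the log shows both the inner TimeoutError from process.py and the outer ExecutionTimeoutError from run_test.py.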
Process tree before termination: pid rss ref pdirt 21287 46.0M 46.0M 23.0M test_tool run_ut @/home/runner/.ya/build/build_root/6vg8/002637/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test 21647 1.4G 1.4G 895M └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/6vg8/002637/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unitt Test command err: Trying to start YDB, gRPC: 20354, MsgBus: 29126 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002637/r3tmp/tmpyQrU61/pdisk_1.dat 2025-04-17T17:10:46.575658Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:10:46.658657Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20354, node 1 2025-04-17T17:10:46.758088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:10:46.758211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:10:46.759930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:10:46.764734Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:10:46.764751Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:10:46.764756Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:10:46.764859Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29126 TClient is connected to server localhost:29126 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:10:47.416519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:10:51.299279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330746844025427:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:51.299751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:51.789868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:10:52.068344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330751138992850:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:52.068793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:52.071036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330751138992855:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:52.088350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:10:52.137852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494330751138992857:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:10:52.220085Z node 1 :TX_PROXY ERROR: Actor# [1:7494330751138992898:2410] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:10:53.788899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:10:54.337493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T17:10:54.878108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T17:10:55.821729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T17:10:57.789567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T17:10:59.550646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:10:59.796167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:11:01.661453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:01.661672Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:04.316614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710708:0, at schemeshard: 72057594046644480 2025-04-17T17:11:04.323789Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2bppej23rgawkpptj2gxqa", SessionId: ydb://session/3?node_id=1&id=NTlhOGE0M2EtZDlmMzhkOTgtYWUwZmVmOWItZjEyYzZmOTg=, Slow query, duration: 13.210421s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE OBJECT external_data_source_p (TYPE SECRET) WITH (value=qwerty12345);\n\n CREATE EXTERNAL DATA SOURCE external_data_source WITH (\n SOURCE_TYPE=\"Iceberg\",\n DATABASE_NAME=\"pgdb\",\n WAREHOUSE_TYPE=\"s3\",\n WAREHOUSE_S3_REGION=\"s3_region\",\n WAREHOUSE_S3_ENDPOINT=\"s3_endpoint\",\n WAREHOUSE_S3_URI=\"s3_uri\",\n \n AUTH_METHOD=\"BASIC\",\n LOGIN=\"crab\",\n PASSWORD_SECRET_NAME=\"external_data_source_p\"\n ,\n \n CATALOG_TYPE=\"hive\",\n CATALOG_HIVE_URI=\"hive_uri\"\n ,\n USE_TLS=\"FALSE\"\n );\n ", parameters: 0b 2025-04-17T17:11:04.362898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710709:0, at schemeshard: 72057594046644480 2025-04-17T17:11:04.363848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2025-04-17T17:11:04.367089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: fal ... 3258Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:27.544143Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494330903270584786:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:27.548927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:11:27.576401Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494330903270584788:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:11:27.644479Z node 2 :TX_PROXY ERROR: Actor# [2:7494330903270584830:2421] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:11:28.416288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:11:28.914661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T17:11:29.481479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T17:11:30.009777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T17:11:30.594384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T17:11:31.287889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:11:31.398059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:11:33.476208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:33.476240Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:33.868990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710700:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 Trying to start YDB, gRPC: 15510, MsgBus: 29232 2025-04-17T17:11:38.561769Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494330947206721480:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:38.561816Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002637/r3tmp/tmpL32UZ0/pdisk_1.dat 2025-04-17T17:11:39.466239Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:39.469299Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:39.469396Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:39.495594Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15510, node 3 2025-04-17T17:11:39.736760Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:39.736885Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:39.736894Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:39.737012Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', 
'@/home/runner/.ya/build/build_root/6vg8/002637/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6vg8/002637/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
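The TKeyValueTest::TestCopyRangeToLongKey output above and below repeats one fault-injection cycle per event type of interest (TEvTabletPipe::TEvServerConnected, TEvKeyValue::TEvRequest, TEvIntermediate, TEvAcquireLock, TEvExecuteTransaction): announce "!Reboot ... on event ... !", restart the tablet when that event is delivered, wait until the tablet resolver sees the new leader actor, and continue. The real harness is C++ actor-runtime code; the sketch below only models the shape of the loop, and every name in it is hypothetical.

# Hypothetical model of the "!Reboot ... on event ... !" cycle; the actual
# TKeyValueTest harness is C++ and drives a real tablet through the actor
# runtime. Nothing below is YDB code.
EVENTS = [
    "NKikimr::TEvTabletPipe::TEvServerConnected",
    "NKikimr::TEvKeyValue::TEvRequest",
    "NKikimr::TEvKeyValue::TEvIntermediate",
    "NKikimr::TEvKeyValue::TEvAcquireLock",
    "NKikimr::TEvKeyValue::TEvExecuteTransaction",
]


class FakeTablet:
    """Stand-in for the tablet under test, purely illustrative."""

    def __init__(self, tablet_id):
        self.id = tablet_id
        self.generation = 1
        self.leader = "[%d:56:2097]" % self.generation

    def reboot_on(self, event):
        # The real test kills the leader when `event` is delivered mid-flight.
        self._reboot_event = event

    def run(self, workload):
        workload(self)
        # After the injected reboot, a new leader actor is elected.
        self.generation += 1
        self.leader = "[%d:81:2111]" % self.generation


def run_with_reboots(tablet, workload, events=EVENTS):
    for event in events:
        print("!Reboot %d (actor %s) on event %s !" % (tablet.id, tablet.leader, event))
        tablet.reboot_on(event)
        tablet.run(workload)
        print("!Reboot %d rebooted! tablet resolver refreshed! new actor is%s"
              % (tablet.id, tablet.leader))


run_with_reboots(FakeTablet(72057594037927937), lambda t: None)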
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:86:2116] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:90:2057] recipient: [8:86:2116] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:89:2117] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:143:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:86:2116] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:90:2057] recipient: [9:86:2116] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:89:2117] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:87:2116] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:91:2057] recipient: [10:87:2116] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:90:2117] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:92:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:94:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:93:2120] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:147:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 
7 is [27:56:2097] sender: [27:89:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:92:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:93:2057] recipient: [27:91:2119] Leader for TabletID 72057594037927937 is [27:94:2120] sender: [27:95:2057] recipient: [27:91:2119] !Reboot 72057594037927937 (actor [27:56:2097]) rebooted! !Reboot 72057594037927937 (actor [27:56:2097]) tablet resolver refreshed! new actor is[27:94:2120] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:57:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:74:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:57:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:74:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:76:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:79:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:80:2057] recipient: [30:78:2110] Leader for TabletID 72057594037927937 is [30:81:2111] sender: [30:82:2057] recipient: [30:78:2110] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! new actor is[30:81:2111] Leader for TabletID 72057594037927937 is [30:81:2111] sender: [30:135:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:76:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:79:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:80:2057] recipient: [31:78:2110] Leader for TabletID 72057594037927937 is [31:81:2111] sender: [31:82:2057] recipient: [31:78:2110] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! 
new actor is[31:81:2111] Leader for TabletID 72057594037927937 is [31:81:2111] sender: [31:135:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:77:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:80:2057] recipient: [32:79:2110] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:81:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:82:2111] sender: [32:83:2057] recipient: [32:79:2110] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! new actor is[32:82:2111] Leader for TabletID 72057594037927937 is [32:82:2111] sender: [32:136:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:80:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:83:2057] recipient: [33:82:2113] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:84:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:85:2114] sender: [33:86:2057] recipient: [33:82:2113] !Reboot 72057594037927937 (actor [33:56:2097]) rebooted! !Reboot 72057594037927937 (actor [33:56:2097]) tablet resolver refreshed! new actor is[33:85:2114] Leader for TabletID 72057594037927937 is [33:85:2114] sender: [33:139:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:80:2057] recipient: [34:36:2083] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:83:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:84:2057] recipient: [34:82:2113] Leader for TabletID 72057594037927937 is [34:85:2114] sender: [34:86:2057] recipient: [34:82:2113] !Reboot 72057594037927937 (actor [34:56:2097]) rebooted! !Reboot 72057594037927937 (actor [34:56:2097]) tablet resolver refreshed! 
new actor is[34:85:2114] Leader for TabletID 72057594037927937 is [34:85:2114] sender: [34:139:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:57:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:74:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:81:2057] recipient: [35:36:2083] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:84:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:85:2057] recipient: [35:83:2113] Leader for TabletID 72057594037927937 is [35:86:2114] sender: [35:87:2057] recipient: [35:83:2113] !Reboot 72057594037927937 (actor [35:56:2097]) rebooted! !Reboot 72057594037927937 (actor [35:56:2097]) tablet resolver refreshed! new actor is[35:86:2114] Leader for TabletID 72057594037927937 is [35:86:2114] sender: [35:140:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:57:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:74:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:84:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:87:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:88:2057] recipient: [36:86:2116] Leader for TabletID 72057594037927937 is [36:89:2117] sender: [36:90:2057] recipient: [36:86:2116] !Reboot 72057594037927937 (actor [36:56:2097]) rebooted! !Reboot 72057594037927937 (actor [36:56:2097]) tablet resolver refreshed! new actor is[36:89:2117] Leader for TabletID 72057594037927937 is [36:89:2117] sender: [36:143:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:57:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:74:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:84:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:87:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:88:2057] recipient: [37:86:2116] Leader for TabletID 72057594037927937 is [37:89:2117] sender: [37:90:2057] recipient: [37:86:2116] !Reboot 72057594037927937 (actor [37:56:2097]) rebooted! !Reboot 72057594037927937 (actor [37:56:2097]) tablet resolver refreshed! 
new actor is[37:89:2117] Leader for TabletID 72057594037927937 is [37:89:2117] sender: [37:143:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:57:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:74:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:85:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:88:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:89:2057] recipient: [38:87:2116] Leader for TabletID 72057594037927937 is [38:90:2117] sender: [38:91:2057] recipient: [38:87:2116] !Reboot 72057594037927937 (actor [38:56:2097]) rebooted! !Reboot 72057594037927937 (actor [38:56:2097]) tablet resolver refreshed! new actor is[38:90:2117] Leader for TabletID 72057594037927937 is [38:90:2117] sender: [38:144:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:57:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:74:2057] recipient: [39:14:2061] |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> DataShardWrite::UpsertLostPrepareArbiter [GOOD] >> DataShardWrite::UpsertNoLocksArbiterRestart |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |74.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |74.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |74.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveTokenSelectCount 2025-04-17 17:11:42,430 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 17:11:42,704 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. 
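This second generic_ut timeout, GenericFederatedQuery::IcebergHiveTokenSelectCount, is the TOKEN-auth twin of the IcebergHiveBasicSelectCount failure above: its CREATE EXTERNAL DATA SOURCE statement uses AUTH_METHOD="TOKEN" with TOKEN_SECRET_NAME instead of AUTH_METHOD="BASIC" with LOGIN and PASSWORD_SECRET_NAME, so the connector requests carry a token credential rather than a basic one. Rendered as plain Python dicts purely for contrast (these are not the actual protobuf classes):

# Illustrative only: the credential halves of the request protos from the two
# failing tests, written as dicts to highlight the single difference.
basic_credentials = {"basic": {"username": "crab", "password": "qwerty12345"}}
token_credentials = {"token": {"type": "IAM", "value": "qwerty12345"}}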
Process tree before termination: pid rss ref pdirt 21692 46.0M 46.0M 23.0M test_tool run_ut @/home/runner/.ya/build/build_root/6vg8/002607/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk6/testing_out_stuff/test 21920 1.4G 1.4G 960M └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/6vg8/002607/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unitt Test command err: Trying to start YDB, gRPC: 22932, MsgBus: 32629 2025-04-17T17:10:46.958742Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494330723677709195:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:10:46.958810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002607/r3tmp/tmpWyIn7f/pdisk_1.dat 2025-04-17T17:10:47.381545Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22932, node 1 2025-04-17T17:10:47.444578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:10:47.444712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:10:47.447540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:10:47.548806Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:10:47.548833Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:10:47.548839Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:10:47.548957Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32629 TClient is connected to server localhost:32629 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:10:48.151653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:10:48.168335Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:10:51.960591Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494330723677709195:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:10:51.960646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:10:53.922009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330753742480945:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:53.922083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:54.617229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:10:55.047139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330758037448363:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:55.047207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:55.064967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330762332415666:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:55.126493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:10:55.141891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330762332415702:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:55.143345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330762332415700:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:55.143419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:55.242735Z node 1 :TX_PROXY ERROR: Actor# [1:7494330762332415706:2400] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T17:10:55.259682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494330762332415668:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:10:55.342824Z node 1 :TX_PROXY ERROR: Actor# [1:7494330762332415745:2428] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:10:57.147118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:10:57.843165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:1, at schemeshard: 72057594046644480 2025-04-17T17:10:58.927370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T17:11:00.684335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T17:11:02.424649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:02.425041Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:02.592181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T17:11:03.506519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:11:03.620001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:11:05.808622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710705:0, at schemeshard: 72057594046644480 2025-04-17T17:11:05.814626Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2bps6x8sbmqv1mnk97h2w5", SessionId: ydb://session/3?node_id=1&id=YmVjNjkxOTUtN2ZhOTE2ZWQtM2E3YzNmNzEtY2JmODg2ZGQ=, Slow query, duration: 11.885666s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE OBJECT external_data_source_p (TYPE SECRET) WITH (value=qwerty12345);\n\n CREATE EXTERNAL DATA SOURCE external_data_source WITH (\n SOURCE_TYPE=\"Iceberg\",\n DATABASE_NAME=\"pgdb\",\n WAREHOUSE_TYPE=\"s3\",\n WAREHOUSE_S3_REGION=\"s3_region\",\n WAREHOUSE_S3_ENDPOINT=\"s3_endpoint\",\n WAREHOUSE_S3_URI=\"s3_uri\",\n \n AUTH_METHOD=\"TOKEN\",\n TOKEN_SECRET_NAME=\"external_data_source_p\"\n ,\n \n CATALOG_TYPE=\"hive\",\n CATALOG_HIVE_URI=\"hive_uri\"\n ,\n USE_TLS=\"FALSE\"\n );\n ", parameters: 0b 2025-04-17T17:11:05.843781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 2025-04-17T17:11:05.845918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710708:0, at schemeshard: 72057594046644480 2025-04-17T17:11:05.847754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710706:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual ... :0, at schemeshard: 72057594046644480 2025-04-17T17:11:30.405265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T17:11:31.003016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T17:11:32.159227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T17:11:33.664966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T17:11:34.342729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:11:34.437494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:11:37.376719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:37.376742Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:38.261789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710704:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 Trying to start YDB, gRPC: 27910, MsgBus: 27692 2025-04-17T17:11:39.849370Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494330951885975608:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:39.849477Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002607/r3tmp/tmpZQdEZZ/pdisk_1.dat 2025-04-17T17:11:41.129188Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:41.130646Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:11:41.193354Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:41.193445Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:41.198200Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27910, node 3 2025-04-17T17:11:41.332702Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:41.332723Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:41.332729Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:41.332829Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27692 TClient is connected to server localhost:27692 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:11:42.081478Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6vg8/002607/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk6/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6vg8/002607/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk6/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) >> DataShardWrite::ReplaceImmediate_DefaultValue [GOOD] >> DataShardWrite::UpdateImmediate ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbSelectCount 2025-04-17 17:11:41,627 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 17:11:41,870 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. 
Process tree before termination: pid rss ref pdirt 21439 46.1M 46.1M 23.1M test_tool run_ut @/home/runner/.ya/build/build_root/6vg8/002630/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk9/testing_out_stuff/test 21715 1.4G 1.4G 976M └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/6vg8/002630/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unitt Test command err: Trying to start YDB, gRPC: 31893, MsgBus: 20836 2025-04-17T17:10:46.327128Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494330725819273865:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:10:46.371582Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002630/r3tmp/tmpdnszj1/pdisk_1.dat 2025-04-17T17:10:46.948847Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:46.953969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:10:46.954071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:10:46.957951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31893, node 1 2025-04-17T17:10:47.055867Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:10:47.055896Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:10:47.055909Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:10:47.056055Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20836 TClient is connected to server localhost:20836 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:10:47.638617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
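Editor's note: the traceback earlier in this chunk shows the harness's two-stage timeout handling: yatest's process.wait() polls the child via wait_for(), raises TimeoutError once the 60-second budget expires, and run_test re-raises it as ExecutionTimeoutError so the chunk is reported as timed out rather than crashed. A minimal Python sketch of that pattern follows; it is illustrative only (run_with_timeout and the polling loop are not yatest's actual code):

import subprocess
import time

class ExecutionTimeoutError(Exception):
    """Test-level wrapper for a low-level wait timeout."""

def run_with_timeout(cmd, timeout_s=60.0, poll_interval=0.1):
    # Start the test binary and poll it until it exits or the deadline passes.
    proc = subprocess.Popen(cmd)
    deadline = time.monotonic() + timeout_s
    try:
        while proc.poll() is None:
            if time.monotonic() > deadline:
                raise TimeoutError(
                    f"{timeout_s} second(s) wait timeout has expired: {cmd}")
            time.sleep(poll_interval)
        return proc.returncode
    except TimeoutError as e:
        proc.kill()  # the real harness dumps the process tree before killing
        raise ExecutionTimeoutError(str(e)) from e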
2025-04-17T17:10:51.319437Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494330725819273865:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:10:51.319684Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:10:51.772785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330747294110947:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:51.772851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:52.432872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:10:52.880694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330751589078375:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:52.881000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:52.881464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330751589078380:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:52.891685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:10:52.940514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494330751589078382:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:10:53.011457Z node 1 :TX_PROXY ERROR: Actor# [1:7494330755884045719:2411] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:10:54.326882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:10:54.987393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:1, at schemeshard: 72057594046644480 2025-04-17T17:10:56.410700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T17:10:57.838415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T17:10:59.160602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T17:11:00.536234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:11:00.759203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:11:01.950819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:01.950847Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:03.907470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710712:0, at schemeshard: 72057594046644480 2025-04-17T17:11:03.915525Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2bpq3s1jjbhjeavqtt2zs3", SessionId: ydb://session/3?node_id=1&id=YzU2OGFjYjktOTk2ZDExZDgtZjQ3ZmY4MmEtM2U2ZjU1ZDE=, Slow query, duration: 12.137364s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE OBJECT external_data_source_password (TYPE SECRET) WITH (value=qwerty12345);\n\n CREATE EXTERNAL DATA SOURCE external_data_source WITH (\n SOURCE_TYPE=\"Ydb\",\n LOCATION=\"localhost:2136\",\n AUTH_METHOD=\"BASIC\",\n LOGIN=\"crab\",\n DATABASE_NAME=\"pgdb\",\n PASSWORD_SECRET_NAME=\"external_data_source_password\",\n USE_TLS=\"TRUE\"\n );\n ", parameters: 0b 2025-04-17T17:11:04.107651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710713:0, at schemeshard: 72057594046644480 2025-04-17T17:11:04.110896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 
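Editor's note: for readability, the setup statement from the KQP_SLOW_LOG record above (logged with escaped newlines) is reproduced below as a Python string constant. The statement text is copied from the log; only the escaping is expanded:

# DDL that provisions the secret and the external data source used by the
# GenericFederatedQuery::YdbSelectCount test, as captured in the slow-query log.
SETUP_QUERY = """
CREATE OBJECT external_data_source_password (TYPE SECRET) WITH (value=qwerty12345);

CREATE EXTERNAL DATA SOURCE external_data_source WITH (
    SOURCE_TYPE="Ydb",
    LOCATION="localhost:2136",
    AUTH_METHOD="BASIC",
    LOGIN="crab",
    DATABASE_NAME="pgdb",
    PASSWORD_SECRET_NAME="external_data_source_password",
    USE_TLS="TRUE"
);
"""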
2025-04-17T17:11:04.113836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710715:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "col1" type { ... Id: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:28.247749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:11:28.260908Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494330906693666266:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:11:28.343127Z node 2 :TX_PROXY ERROR: Actor# [2:7494330906693666307:2422] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:11:28.884215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:11:29.370901Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T17:11:29.905538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T17:11:30.594737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T17:11:31.112657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:31.112681Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:31.440257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T17:11:31.948505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:11:32.026464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:11:33.859617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710700:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. 
selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 Trying to start YDB, gRPC: 22589, MsgBus: 63271 2025-04-17T17:11:36.545470Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494330940533987543:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:36.546326Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002630/r3tmp/tmpGd2C8z/pdisk_1.dat 2025-04-17T17:11:37.430436Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:37.582500Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:37.582675Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:37.605287Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:11:37.614446Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22589, node 3 2025-04-17T17:11:38.244704Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:38.244730Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:38.244736Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:38.244837Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63271 TClient is connected to server localhost:63271 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:11:38.945154Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
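Editor's note: the DescribeTable / ListSplits / ReadSplits exchanges above each print a "CRAB Expected:" and a "CRAB Actual:" dump because the mock connector used by generic_ut checks every incoming request against a pre-registered expectation before answering. Below is a simplified Python stand-in for that check; the real comparison happens proto-to-proto inside the C++ mock, and the whitespace normalization here is an assumption made for illustration:

def normalize(proto_text: str) -> str:
    # Collapse whitespace so layout differences in text-format dumps are ignored.
    return " ".join(proto_text.split())

def check_request(expected: str, actual: str) -> None:
    if normalize(expected) != normalize(actual):
        raise AssertionError(
            "connector request mismatch:\n"
            f"expected: {expected}\n"
            f"actual:   {actual}")

# Example: the two dumps differ only in whitespace, so the check passes and
# the mock responds with GRpcStatusCode: 0, as in the trace above.
check_request('from { table: "example_1" }', 'from {  table: "example_1" }')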
2025-04-17T17:11:38.955814Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:11:41.540637Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494330940533987543:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:41.697397Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6vg8/002630/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk9/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6vg8/002630/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk9/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] >> DataShardWrite::UpsertPreparedManyTables+Volatile [GOOD] >> DataShardWrite::UpsertPreparedManyTables-Volatile >> KqpBatchUpdate::NotIdempotent [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveSaSelectCount 2025-04-17 17:11:41,654 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 17:11:41,791 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. 
Process tree before termination: pid rss ref pdirt 21445 46.0M 46.2M 23.2M test_tool run_ut @/home/runner/.ya/build/build_root/6vg8/00262a/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk5/testing_out_stuff/test 21714 1.4G 1.4G 919M └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/6vg8/00262a/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unitt Test command err: Trying to start YDB, gRPC: 9056, MsgBus: 5933 2025-04-17T17:10:45.469911Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494330721980199920:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:10:45.470226Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:10:46.686078Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494330726275167480:2291];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:10:46.686119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00262a/r3tmp/tmpKtn2G9/pdisk_1.dat 2025-04-17T17:10:46.855236Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:46.858582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:10:46.858710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:10:46.864938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9056, node 1 2025-04-17T17:10:46.961664Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:10:46.961763Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:10:46.961837Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:10:46.962179Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5933 TClient is connected to server localhost:5933 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
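Editor's note: the "Process tree before termination" block above reports per-process RSS (e.g. 1.4G for the ASAN-built generic_ut binary) just before the wrapper kills the timed-out test. A sketch of how such a snapshot can be taken on Linux via procfs follows; the rss_kib helper is hypothetical, though VmRSS is a standard /proc/<pid>/status field reported in kB:

from pathlib import Path

def rss_kib(pid: int) -> int:
    # Parse VmRSS out of /proc/<pid>/status; returns resident set size in KiB.
    for line in Path(f"/proc/{pid}/status").read_text().splitlines():
        if line.startswith("VmRSS:"):
            return int(line.split()[1])
    return 0  # kernel threads and already-exited processes report no VmRSS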
2025-04-17T17:10:47.526659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:10:47.545023Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:10:50.463506Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494330721980199920:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:10:50.463729Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:10:51.765022Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494330726275167480:2291];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:10:51.770640Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:10:51.926997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330747750004374:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:51.929269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:52.613446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:10:52.957283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330752044971802:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:52.957336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:52.959113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330752044971807:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:52.972242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:10:53.020593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494330752044971809:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:10:53.095563Z node 1 :TX_PROXY ERROR: Actor# [1:7494330756339939146:2415] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:10:54.499071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:10:55.046237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T17:10:56.018027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T17:10:56.914099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T17:10:58.251511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T17:11:00.506550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:11:00.779843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:11:01.859598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:01.859620Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:04.211392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710706:0, at schemeshard: 72057594046644480 2025-04-17T17:11:04.217211Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2bpq8a9t5k6jtmj1ggnxyp", SessionId: ydb://session/3?node_id=1&id=NzBjMjQ3NGYtN2Y5NDEzYzUtOTM4MzZiZTYtNzRjMzcyZjM=, Slow query, duration: 12.285356s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE OBJECT external_data_source_p (TYPE SECRET) WITH (value=qwerty12345);\n\n CREATE EXTERNAL DATA SOURCE external_data_source WITH (\n SOURCE_TYPE=\"Iceberg\",\n DATABASE_NAME=\"pgdb\",\n WAREHOUSE_TYPE=\"s3\",\n WAREHOUSE_S3_REGION=\"s3_region\",\n WAREHOUSE_S3_ENDPOINT=\"s3_endpoint\",\n WAREHOUSE_S3_URI=\"s3_uri\",\n \n AUTH_METHOD=\"SERVICE_ACCOUNT\",\n SERVICE_ACCOUNT_ID=\"my_sa\",\n SERVICE_ACCOUNT_SECRET_NAME=\"external_data_source_p\"\n ,\n \n CATALOG_TYPE=\"hive\",\n CATALOG_HIVE_URI=\"hive_uri\"\n ,\n USE_TLS=\"FALSE\"\n );\n ", parameters: 0b 2025-04-17T17:11:04.239270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710709:0, at schemeshard: 72057594046644480 2025-04-17T17:11:04.240834Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-04-17T17:11:04.242673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: ... ol default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:28.990661Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494330905523694850:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:28.994839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:11:29.005659Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-17T17:11:29.006443Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494330905523694853:2365], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:11:29.086626Z node 2 :TX_PROXY ERROR: Actor# [2:7494330909818662189:2431] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:11:29.584259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:11:30.104900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T17:11:30.813480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T17:11:31.804703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:31.804741Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:31.867980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T17:11:32.968321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T17:11:33.454364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:11:33.570256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:11:37.432135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710702:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 Trying to start YDB, gRPC: 10937, MsgBus: 8675 2025-04-17T17:11:41.081161Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494330959703541180:2213];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:41.095837Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00262a/r3tmp/tmpae74HV/pdisk_1.dat 2025-04-17T17:11:41.335507Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:41.358032Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:41.358119Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:41.362091Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10937, node 3 2025-04-17T17:11:41.467992Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:41.468014Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:41.468024Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:41.468159Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6vg8/00262a/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk5/testing_out_stuff/test_tool.args']' stopped 
by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6vg8/00262a/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk5/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:86:2116] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:90:2057] recipient: [8:86:2116] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:89:2117] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:143:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:86:2116] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:90:2057] recipient: [9:86:2116] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:89:2117] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:87:2116] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:91:2057] recipient: [10:87:2116] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:90:2117] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:92:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:94:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:93:2120] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:147:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 29:79:2110] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! 
new actor is[29:81:2111] Leader for TabletID 72057594037927937 is [29:81:2111] sender: [29:135:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:77:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:80:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:81:2057] recipient: [30:79:2110] Leader for TabletID 72057594037927937 is [30:82:2111] sender: [30:83:2057] recipient: [30:79:2110] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! new actor is[30:82:2111] Leader for TabletID 72057594037927937 is [30:82:2111] sender: [30:136:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:80:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:83:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:84:2057] recipient: [31:82:2113] Leader for TabletID 72057594037927937 is [31:85:2114] sender: [31:86:2057] recipient: [31:82:2113] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! new actor is[31:85:2114] Leader for TabletID 72057594037927937 is [31:85:2114] sender: [31:139:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:80:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:83:2057] recipient: [32:82:2113] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:84:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:85:2114] sender: [32:86:2057] recipient: [32:82:2113] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! 
new actor is[32:85:2114] Leader for TabletID 72057594037927937 is [32:85:2114] sender: [32:139:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:81:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:84:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:85:2057] recipient: [33:83:2113] Leader for TabletID 72057594037927937 is [33:86:2114] sender: [33:87:2057] recipient: [33:83:2113] !Reboot 72057594037927937 (actor [33:56:2097]) rebooted! !Reboot 72057594037927937 (actor [33:56:2097]) tablet resolver refreshed! new actor is[33:86:2114] Leader for TabletID 72057594037927937 is [33:86:2114] sender: [33:140:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:84:2057] recipient: [34:36:2083] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:86:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:88:2057] recipient: [34:87:2116] Leader for TabletID 72057594037927937 is [34:89:2117] sender: [34:90:2057] recipient: [34:87:2116] !Reboot 72057594037927937 (actor [34:56:2097]) rebooted! !Reboot 72057594037927937 (actor [34:56:2097]) tablet resolver refreshed! new actor is[34:89:2117] Leader for TabletID 72057594037927937 is [34:89:2117] sender: [34:143:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:57:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:74:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:84:2057] recipient: [35:36:2083] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:87:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:88:2057] recipient: [35:86:2116] Leader for TabletID 72057594037927937 is [35:89:2117] sender: [35:90:2057] recipient: [35:86:2116] !Reboot 72057594037927937 (actor [35:56:2097]) rebooted! !Reboot 72057594037927937 (actor [35:56:2097]) tablet resolver refreshed! 
new actor is[35:89:2117] Leader for TabletID 72057594037927937 is [35:89:2117] sender: [35:143:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:57:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:74:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:85:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:88:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:89:2057] recipient: [36:87:2116] Leader for TabletID 72057594037927937 is [36:90:2117] sender: [36:91:2057] recipient: [36:87:2116] !Reboot 72057594037927937 (actor [36:56:2097]) rebooted! !Reboot 72057594037927937 (actor [36:56:2097]) tablet resolver refreshed! new actor is[36:90:2117] Leader for TabletID 72057594037927937 is [36:90:2117] sender: [36:144:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:57:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:74:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:88:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:91:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:92:2057] recipient: [37:90:2119] Leader for TabletID 72057594037927937 is [37:93:2120] sender: [37:94:2057] recipient: [37:90:2119] !Reboot 72057594037927937 (actor [37:56:2097]) rebooted! !Reboot 72057594037927937 (actor [37:56:2097]) tablet resolver refreshed! new actor is[37:93:2120] Leader for TabletID 72057594037927937 is [37:93:2120] sender: [37:147:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:57:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:74:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:88:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:91:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:92:2057] recipient: [38:90:2119] Leader for TabletID 72057594037927937 is [38:93:2120] sender: [38:94:2057] recipient: [38:90:2119] !Reboot 72057594037927937 (actor [38:56:2097]) rebooted! !Reboot 72057594037927937 (actor [38:56:2097]) tablet resolver refreshed! 
new actor is[38:93:2120] Leader for TabletID 72057594037927937 is [38:93:2120] sender: [38:147:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:57:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:74:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:89:2057] recipient: [39:36:2083] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:92:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:93:2057] recipient: [39:91:2119] Leader for TabletID 72057594037927937 is [39:94:2120] sender: [39:95:2057] recipient: [39:91:2119] !Reboot 72057594037927937 (actor [39:56:2097]) rebooted! !Reboot 72057594037927937 (actor [39:56:2097]) tablet resolver refreshed! new actor is[39:94:2120] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:52:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:57:2057] recipient: [40:52:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:74:2057] recipient: [40:14:2061] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLSelectCount 2025-04-17 17:11:42,397 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 17:11:42,526 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. 
Process tree before termination: pid rss ref pdirt 21689 46.1M 46.1M 23.0M test_tool run_ut @/home/runner/.ya/build/build_root/6vg8/0025f2/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk7/testing_out_stuff/test 22032 1.4G 1.5G 982M └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/6vg8/0025f2/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unitt Test command err: Trying to start YDB, gRPC: 4615, MsgBus: 7426 2025-04-17T17:10:47.346418Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494330729400090650:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:10:47.346749Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0025f2/r3tmp/tmpNziUmi/pdisk_1.dat 2025-04-17T17:10:47.812958Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:47.827928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:10:47.828025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:10:47.829745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4615, node 1 2025-04-17T17:10:47.896723Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:10:47.896754Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:10:47.896761Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:10:47.896889Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7426 TClient is connected to server localhost:7426 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:10:48.525408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:10:52.347518Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494330729400090650:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:10:52.347584Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:10:55.277792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330763759829554:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:55.277876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:56.349096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:10:57.075932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330772349764283:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:57.084340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330772349764278:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:57.084792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:57.088769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:10:57.136725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494330772349764285:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:10:57.227365Z node 1 :TX_PROXY ERROR: Actor# [1:7494330772349764348:2427] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:10:59.967858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:11:01.219547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:1, at schemeshard: 72057594046644480 2025-04-17T17:11:02.680817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T17:11:02.814281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:02.814312Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:04.226695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T17:11:04.700929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T17:11:05.166663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:11:05.201108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:11:10.853022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710709:0, at schemeshard: 72057594046644480 2025-04-17T17:11:10.934569Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2bptf3e6q2n9g9c1ketwah", SessionId: ydb://session/3?node_id=1&id=NGM4Zjc0MDktZTA2YzJkYzctZmRlMjkzNGUtZWVhZmY3MmY=, Slow query, duration: 15.726635s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE OBJECT external_data_source_password (TYPE SECRET) WITH (value=qwerty12345);\n\n CREATE EXTERNAL DATA SOURCE external_data_source WITH (\n SOURCE_TYPE=\"PostgreSQL\",\n LOCATION=\"localhost:5432\",\n AUTH_METHOD=\"BASIC\",\n LOGIN=\"crab\",\n PASSWORD_SECRET_NAME=\"external_data_source_password\",\n USE_TLS=\"TRUE\",\n PROTOCOL=\"NATIVE\",\n DATABASE_NAME=\"pgdb\",\n SCHEMA=\"public\"\n );\n ", parameters: 0b 2025-04-17T17:11:11.220688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-04-17T17:11:11.226196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0,
at schemeshard: 72057594046644480 2025-04-17T17:11:11.229155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710713:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data ... ose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:11:31.913780Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494330917570971393:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:11:32.007373Z node 2 :TX_PROXY ERROR: Actor# [2:7494330921865938729:2399] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:11:32.414276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:11:32.446967Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494330900391101424:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:32.447024Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:11:32.849318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T17:11:33.357527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T17:11:34.128432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T17:11:34.676641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T17:11:35.255274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:11:35.419306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:11:37.928093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710702:0, at schemeshard: 72057594046644480 2025-04-17T17:11:37.939222Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710702, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 Trying to start YDB, gRPC: 10000, MsgBus: 20792 2025-04-17T17:11:39.790939Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494330951996479098:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:39.835004Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0025f2/r3tmp/tmpWVLzAB/pdisk_1.dat 2025-04-17T17:11:40.490488Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:40.543854Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:40.543947Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:40.555322Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10000, node 3 2025-04-17T17:11:41.083776Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:41.083798Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:41.083807Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:41.083925Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20792 TClient is connected to server localhost:20792 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:11:42.081478Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:11:42.089983Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6vg8/0025f2/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk7/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6vg8/0025f2/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk7/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) >> TKeyValueTest::TestConcatToLongKey [GOOD] >> TBSV::ShouldLimitBlockStoreVolumeDropRate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::NotIdempotent [GOOD] Test command err: Trying to start YDB, gRPC: 65412, MsgBus: 20169 2025-04-17T17:11:34.175757Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494330931883547298:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:34.176288Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00245a/r3tmp/tmp99lUSY/pdisk_1.dat 2025-04-17T17:11:34.704654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:34.704788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:34.709450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:11:34.739571Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65412, node 1 2025-04-17T17:11:34.828726Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:34.828757Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:34.828765Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:34.828888Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20169 2025-04-17T17:11:39.173990Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494330931883547298:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:39.174544Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:20169 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:11:39.557836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:39.603324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:39.781225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:40.865889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:41.206462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:44.392652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330974833221940:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:44.393411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:45.990678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:11:46.062593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:11:46.187126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:11:46.358173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:11:46.523065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:11:46.937271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:11:47.158486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:11:47.549981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330987718124562:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:47.550045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:47.550204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330987718124567:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:47.597497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:11:47.652510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494330987718124569:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:11:47.752461Z node 1 :TX_PROXY ERROR: Actor# [1:7494330987718124627:3614] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:11:49.768838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:49.768866Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:51.308850Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494331000603026947:2547], status: GENERIC_ERROR, issues:
<main>: Error: Table intent determination, code: 1040
<main>:3:43: Error: Batch update is only supported for idempotent updates. 2025-04-17T17:11:51.310866Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjM4NjVlMTUtOTg4MjEyMTMtNWEyZWViNTQtODE2Nzg4Mw==, ActorId: [1:7494331000603026874:2531], ActorState: ExecuteState, TraceId: 01js2brgqk6npjxqqewx9f7tha, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T17:11:51.456000Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494331004897994295:2552], status: GENERIC_ERROR, issues:
<main>: Error: Table intent determination, code: 1040
<main>:3:43: Error: Batch update is only supported for idempotent updates. 2025-04-17T17:11:51.476611Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjM4NjVlMTUtOTg4MjEyMTMtNWEyZWViNTQtODE2Nzg4Mw==, ActorId: [1:7494331000603026874:2531], ActorState: ExecuteState, TraceId: 01js2brha55zg4dxpbstbyh0vg, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T17:11:51.859255Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494331004897994329:2565], status: GENERIC_ERROR, issues:
<main>: Error: Table intent determination, code: 1040
<main>:3:51: Error: Batch update is only supported for idempotent updates. 2025-04-17T17:11:51.860294Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjM4NjVlMTUtOTg4MjEyMTMtNWEyZWViNTQtODE2Nzg4Mw==, ActorId: [1:7494331000603026874:2531], ActorState: ExecuteState, TraceId: 01js2brhnqebxagpfkqha8tvb3, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TBSV::CleanupDroppedVolumesOnRestart ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopBasicSelectCount 2025-04-17 17:11:42,344 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 17:11:42,679 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. Process tree before termination: pid rss ref pdirt 21675 46.0M 46.0M 23.0M test_tool run_ut @/home/runner/.ya/build/build_root/6vg8/002601/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test 21909 1.3G 1.3G 867M └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/6vg8/002601/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unitt Test command err: Trying to start YDB, gRPC: 11599, MsgBus: 14109 2025-04-17T17:10:47.073035Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494330728188864542:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:10:47.073081Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002601/r3tmp/tmp1kP6dq/pdisk_1.dat 2025-04-17T17:10:47.433532Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:47.446350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:10:47.446415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:10:47.448692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11599, node 1 2025-04-17T17:10:47.536326Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:10:47.536348Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:10:47.536356Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:10:47.536464Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14109 TClient is connected to server localhost:14109 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:10:48.117000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:10:52.073476Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494330728188864542:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:10:52.073542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:10:54.149509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330753958668991:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:54.149693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:54.618875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:10:54.860267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330758253636413:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:54.860547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:54.861161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330758253636418:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:54.879182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:10:54.955677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494330758253636420:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:10:55.017444Z node 1 :TX_PROXY ERROR: Actor# [1:7494330762548603757:2410] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:10:57.031441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:10:58.122991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480 2025-04-17T17:11:00.001544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T17:11:01.646805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T17:11:02.432670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:02.432940Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:03.302946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T17:11:04.109160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:11:04.176977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:11:06.562525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710704:0, at schemeshard: 72057594046644480 2025-04-17T17:11:06.593315Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2bps65cxtxhw7z8c619sva", SessionId: ydb://session/3?node_id=1&id=YTE3YWRkOTUtMmM3YmU0M2ItMTEzZTQ0NWItZWM0N2ZjZmM=, Slow query, duration: 12.672437s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE OBJECT external_data_source_p (TYPE SECRET) WITH (value=qwerty12345);\n\n CREATE EXTERNAL DATA SOURCE external_data_source WITH (\n SOURCE_TYPE=\"Iceberg\",\n DATABASE_NAME=\"pgdb\",\n WAREHOUSE_TYPE=\"s3\",\n WAREHOUSE_S3_REGION=\"s3_region\",\n WAREHOUSE_S3_ENDPOINT=\"s3_endpoint\",\n WAREHOUSE_S3_URI=\"s3_uri\",\n \n AUTH_METHOD=\"BASIC\",\n LOGIN=\"crab\",\n PASSWORD_SECRET_NAME=\"external_data_source_p\"\n ,\n \n CATALOG_TYPE=\"hadoop\"\n ,\n USE_TLS=\"FALSE\"\n );\n ", parameters: 0b 2025-04-17T17:11:06.919510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 2025-04-17T17:11:06.990299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 2025-04-17T17:11:07.034337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710706:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalo ... 80 waiting... 2025-04-17T17:11:27.865697Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:11:30.063599Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494330890685228127:2089];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:30.070046Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:11:30.947599Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494330912160065218:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:30.947670Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:30.968845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:11:31.042504Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494330916455032633:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:31.042596Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:31.042971Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494330916455032639:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:31.046447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:11:31.069467Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494330916455032641:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:11:31.131022Z node 2 :TX_PROXY ERROR: Actor# [2:7494330916455032716:2416] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:11:31.791598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:11:32.955602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:1, at schemeshard: 72057594046644480 2025-04-17T17:11:34.294259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T17:11:34.933304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T17:11:35.817058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T17:11:37.031206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:11:37.135996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:11:40.275633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:40.275659Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:40.821083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710708:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL
CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL
CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL
ReadSplits result. GRpcStatusCode: 0
Trying to start YDB, gRPC: 29463, MsgBus: 11899
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6vg8/002601/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6vg8/002601/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {})
|75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest
>> TTxDataShardMiniKQL::CrossShard_6_Local [GOOD]
>> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-17T17:11:48.482348Z node 1 :FLAT_TX_SCHEMESHARD
NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:11:48.482415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:11:48.482447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:11:48.482858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:11:48.483114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:11:48.483578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:11:48.483841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:11:48.489268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:11:48.492632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:11:49.319060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:11:49.319357Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:49.556140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:11:49.556627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:11:49.556846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:11:49.616990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:11:49.619156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:11:49.636972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:11:49.637733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:11:49.681915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:11:49.686564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:11:49.686872Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:11:49.688203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:11:49.688253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:11:49.688293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:11:49.688412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:11:49.757120Z node 1 :HIVE INFO: [72057594037968897] started, 
primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:11:51.743991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:11:51.744309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:51.744480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:11:51.744661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:11:51.744704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:51.756590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:11:51.757525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:11:51.759974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:51.760711Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:11:51.761003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:11:51.761295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:11:51.776131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:51.776190Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:11:51.776222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:11:51.794226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:51.794556Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:51.794861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:11:51.795144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:11:51.807088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:11:51.820813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from 
tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:11:51.823034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:11:51.841432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:11:51.842379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:11:51.842960Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:11:51.849310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:11:51.849605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:11:51.851693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:11:51.853380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:11:51.868068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:11:51.868138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:11:51.868310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:11:51.868351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:11:51.868667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:51.868704Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:11:51.868778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:11:51.868804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:11:51.868840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:11:51.868875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:11:51.868909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:11:51.868940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:11:51.868986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:11:51.869008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:11:51.869054Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:11:51.869085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:11:51.869108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:11:51.870513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:11:51.870602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:11:51.870628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... cated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T17:11:53.666285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:11:53.667671Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-17T17:11:53.672988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T17:11:53.673574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409546 Forgetting tablet 72075186233409547 2025-04-17T17:11:53.674398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:11:53.696026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-17T17:11:53.697409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-17T17:11:53.716901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:11:53.717527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:11:53.718326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropBlockStoreVolume TPropose, operationId: 102:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-17T17:11:53.718940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:11:53.721428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 
progress is 1/1 2025-04-17T17:11:53.722006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:11:53.722548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:11:53.722824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:11:53.723357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:11:53.723824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:11:53.725519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-17T17:11:53.726101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:11:53.726386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:11:53.727152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:11:53.727840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:11:53.728385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-17T17:11:53.728942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-17T17:11:53.728966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-17T17:11:53.736047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T17:11:53.736097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-17T17:11:53.736784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T17:11:53.736821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T17:11:53.737346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-17T17:11:53.737377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-17T17:11:53.737492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:11:53.737527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:11:53.737641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:11:53.738812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:11:53.738840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-17T17:11:53.739073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, 
path id: 2 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T17:11:53.742805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:11:53.743082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:11:53.743104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:11:53.743340Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T17:11:53.744925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:11:53.745271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:11:53.745311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:11:53.745355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:11:53.746239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:11:53.746289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:11:53.746309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:11:53.746329Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T17:11:53.746521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:11:53.746567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-17T17:11:53.750751Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2025-04-17T17:11:53.751397Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2025-04-17T17:11:53.752274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T17:11:53.755000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T17:11:53.756300Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-04-17T17:11:53.761796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-04-17T17:11:53.761990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-04-17T17:11:53.762550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1
2025-04-17T17:11:53.762881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2
TestModificationResult got TxId: 102, wait until txId: 102
TestWaitNotification wait txId: 102
2025-04-17T17:11:53.763810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion
2025-04-17T17:11:53.764097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102
2025-04-17T17:11:53.773291Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944
2025-04-17T17:11:53.773381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-04-17T17:11:53.773409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:392:2372]
TestWaitNotification: OK eventTxId 102
wait until 72075186233409546 is deleted
wait until 72075186233409547 is deleted
2025-04-17T17:11:53.776675Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546
2025-04-17T17:11:53.776956Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547
Deleted tabletId 72075186233409546
Deleted tabletId 72075186233409547
{ Type { Kind: Struct Struct { Member { Name: "ShardsToDelete" Type { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "List" Type { Kind: List List { Item { Kind: Struct Struct { Member { Name: "ShardIdx" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } Member { Name: "Truncated" Type { Kind: Data Data { Scheme: 6 } } } } } } } } } } Value { Struct { Optional { Struct { } Struct { Bool: false } } } } }
|75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest
|75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest
|75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest
------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopSaSelectCount
2025-04-17 17:11:42,132 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-04-17 17:11:42,327 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout.
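Both wrapper timeouts in this output (chunk1 earlier, chunk2 here) fail through the same mechanism: the yatest wrapper polls the child test process until a fixed 60-second budget expires, wait_for raises TimeoutError, and run_test.py rewraps it as ExecutionTimeoutError, which is why the tracebacks come in pairs. A minimal sketch of that polling pattern, using simplified stand-in names rather than the actual code in library/python/testing/yatest_common/yatest/common/process.py:

    import time

    MAX_MESSAGE_LEN = 1000  # stand-in for the harness constant seen in the traceback

    def truncate(message, limit):
        # Bound the error text, as the truncate() call in the traceback suggests.
        return message if len(message) <= limit else message[:limit - 3] + "..."

    def wait_for(check, timeout, message, sleep_time=0.1):
        # Poll check() until it succeeds or the deadline passes, then raise.
        deadline = time.time() + timeout
        while time.time() < deadline:
            if check():
                return
            time.sleep(sleep_time)
        raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))

    # e.g. wait_for(lambda: proc.poll() is not None, timeout=60,
    #               message="60 second(s) wait timeout has expired: ...")

The practical consequence is visible in the process tree below: the test binary was still running (1.4G resident) when the budget ran out, so the whole chunk is reported as a timeout rather than as a failing assertion.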
Process tree before termination:
  pid    rss    ref    pdirt
  21588  46.0M  46.0M  23.0M  test_tool run_ut @/home/runner/.ya/build/build_root/6vg8/002616/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk2/testing_out_stuff/test
  21835  1.4G   1.4G   972M   └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/6vg8/002616/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unitt
Test command err:
Trying to start YDB, gRPC: 23320, MsgBus: 14683
2025-04-17T17:10:46.760324Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494330723341287742:2069];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:10:46.760415Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002616/r3tmp/tmpazMZ2C/pdisk_1.dat
2025-04-17T17:10:47.144186Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:10:47.159468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:10:47.159571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:10:47.161766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 23320, node 1
2025-04-17T17:10:47.219779Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T17:10:47.219808Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T17:10:47.219856Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T17:10:47.220004Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:14683
TClient is connected to server localhost:14683
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T17:10:47.694136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:10:47.706887Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:10:51.764642Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494330723341287742:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:10:51.765051Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:10:52.556504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330749111092186:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:52.556587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:53.167286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:10:53.393726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330753406059608:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:53.393784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:53.393840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330753406059613:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:53.397218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:10:53.413209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494330753406059615:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:10:53.486148Z node 1 :TX_PROXY ERROR: Actor# [1:7494330753406059655:2407] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:10:55.144756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:10:55.669375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480 2025-04-17T17:10:56.965060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T17:10:58.452516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T17:11:00.385549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T17:11:01.046574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:11:01.250875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:11:02.132395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:02.132422Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:04.522689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710706:0, at schemeshard: 72057594046644480 2025-04-17T17:11:04.529686Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2bpqvp2phnpvgh4f4bnypx", SessionId: ydb://session/3?node_id=1&id=ZjA2NWM2YS01N2EwYTk4Ny00NDRkZTQyOS05YmQ4NTZjZQ==, Slow query, duration: 11.993056s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE OBJECT external_data_source_p (TYPE SECRET) WITH (value=qwerty12345);\n\n CREATE EXTERNAL DATA SOURCE external_data_source WITH (\n SOURCE_TYPE=\"Iceberg\",\n DATABASE_NAME=\"pgdb\",\n WAREHOUSE_TYPE=\"s3\",\n WAREHOUSE_S3_REGION=\"s3_region\",\n WAREHOUSE_S3_ENDPOINT=\"s3_endpoint\",\n WAREHOUSE_S3_URI=\"s3_uri\",\n \n AUTH_METHOD=\"SERVICE_ACCOUNT\",\n SERVICE_ACCOUNT_ID=\"my_sa\",\n SERVICE_ACCOUNT_SECRET_NAME=\"external_data_source_p\"\n ,\n \n CATALOG_TYPE=\"hadoop\"\n ,\n USE_TLS=\"FALSE\"\n );\n ", parameters: 0b 2025-04-17T17:11:04.554828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710708:0, at schemeshard: 72057594046644480 2025-04-17T17:11:04.556214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710709:0, at schemeshard: 72057594046644480 2025-04-17T17:11:04.558540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { ... 
480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:11:30.273700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:11:30.915563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T17:11:31.799158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T17:11:32.312750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T17:11:32.859445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T17:11:33.304372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:11:33.368813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:11:34.440373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:34.440404Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:35.813715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710700:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. 
selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 Trying to start YDB, gRPC: 63435, MsgBus: 10370 2025-04-17T17:11:38.742951Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494330949133491981:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:38.742995Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002616/r3tmp/tmpa4dV7N/pdisk_1.dat 2025-04-17T17:11:39.156361Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:39.186620Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:39.186705Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:39.190158Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63435, node 3 2025-04-17T17:11:39.448904Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:39.448933Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:39.448944Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:39.449096Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10370 TClient is connected to server localhost:10370 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:11:41.116819Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
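The DescribeTable/ListSplits/ReadSplits exchange above is the connector mock checking each incoming request against a golden copy: the paired "CRAB Expected" / "CRAB Actual" blocks are text-format protobuf dumps that must match verbatim before the mock answers with GRpcStatusCode: 0. A minimal, hypothetical way to reproduce that comparison outside the C++ harness (the helper names are illustrative, not the actual test code):

```python
import difflib
import re

def normalize(proto_text):
    # Collapse whitespace runs so single-line and pretty-printed text-format
    # dumps of the same message compare equal.
    return re.sub(r"\s+", " ", proto_text).strip()

def diff_dumps(expected, actual):
    # Return "" when the dumps match, otherwise a token-level unified diff.
    e, a = normalize(expected), normalize(actual)
    if e == a:
        return ""
    return "\n".join(difflib.unified_diff(
        e.split(), a.split(), fromfile="expected", tofile="actual", lineterm=""))

expected = 'data_source_instance { kind: ICEBERG database: "pgdb" } table: "example_1"'
actual = 'data_source_instance { kind: ICEBERG database: "pgdb" } table: "example_1"'
assert diff_dumps(expected, actual) == ""
```

Token-level diffing keeps the output readable when two multi-kilobyte dumps differ in a single field; a production harness would rather compare the parsed messages themselves, which is also insensitive to formatting.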
2025-04-17T17:11:41.128151Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6vg8/002616/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk2/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6vg8/002616/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk2/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |75.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestConcatToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! 
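The traceback above shows the two layers of yatest timeout handling: process.wait delegates to wait_for, which raises TimeoutError once the 60-second budget is spent, and run_test.py catches it and re-raises ExecutionTimeoutError, producing the chained "During handling of the above exception" output. A minimal sketch of that pattern, with hypothetical stand-ins for the yatest classes (the real helpers live in library/python/testing/yatest_common/yatest/common/process.py):

```python
import subprocess
import time

class WaitTimeoutError(Exception):
    """Stand-in for yatest.common.process.TimeoutError."""

class ExecutionTimeoutError(Exception):
    """Stand-in for yatest.common.process.ExecutionTimeoutError."""

def wait_for(proc, timeout_s):
    # Poll until the child exits or the wall-clock budget is spent.
    deadline = time.monotonic() + timeout_s
    while proc.poll() is None:
        if time.monotonic() >= deadline:
            proc.kill()
            raise WaitTimeoutError(
                "%s second(s) wait timeout has expired: %r" % (timeout_s, proc.args))
        time.sleep(0.1)
    return proc.returncode

def run_with_timeout(cmd, timeout_s):
    proc = subprocess.Popen(cmd)
    try:
        return wait_for(proc, timeout_s)
    except WaitTimeoutError as e:
        # The outer runner converts the low-level timeout into a richer error,
        # which is why the log shows two chained tracebacks.
        raise ExecutionTimeoutError((str(e),), {}) from e
```

The double-tuple argument mirrors the `(("60 second(s) wait timeout has expired: ...",), {})` repr visible in the log.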
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:86:2116] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:90:2057] recipient: [8:86:2116] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:89:2117] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:143:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:86:2116] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:90:2057] recipient: [9:86:2116] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:89:2117] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:88:2116] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:91:2057] recipient: [10:88:2116] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:90:2117] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:87:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:93:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
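The repeating "!Reboot 72057594037927937 (actor [...]) on event ... !" blocks above come from a fault-injection harness: the same KeyValue scenario is replayed once per event type, the tablet is rebooted the first time that event is delivered, and the test then verifies that a fresh leader actor ("new actor is[...]") takes over and the scenario still completes. Schematically, as a hypothetical Python sketch rather than the C++ harness itself:

```python
EVENTS = [
    "NKikimr::TEvTabletPipe::TEvServerConnected",
    "NKikimr::TEvKeyValue::TEvRequest",
    "NKikimr::TEvKeyValue::TEvIntermediate",
    "NKikimr::TEvKeyValue::TEvExecuteTransaction",
    "NKikimr::TEvKeyValue::TEvAcquireLock",
]

def run_scenario(reboot_on=None):
    # Stub for the C++ test body; a real harness drives the tablet and
    # reboots it the first time `reboot_on` is observed in the event stream.
    return "GOOD"

def reboot_at_each_event():
    # Replay the scenario once per event type, injecting one reboot per run.
    for event in EVENTS:
        assert run_scenario(reboot_on=event) == "GOOD", (
            "scenario broke when rebooted on %s" % event)

reboot_at_each_event()
```

Sweeping the reboot point across every event type is what makes the log so repetitive: each cycle is one run of the same scenario with the fault injected one step later.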
new actor is[11:92:2119] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:146:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 7 is [33:56:2097] sender: [33:92:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:95:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:96:2057] recipient: [33:94:2121] Leader for TabletID 72057594037927937 is [33:97:2122] sender: [33:98:2057] recipient: [33:94:2121] !Reboot 72057594037927937 (actor [33:56:2097]) rebooted! !Reboot 72057594037927937 (actor [33:56:2097]) tablet resolver refreshed! new actor is[33:97:2122] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:57:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:74:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:57:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:74:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:76:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:79:2057] recipient: [36:78:2110] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:80:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:81:2111] sender: [36:82:2057] recipient: [36:78:2110] !Reboot 72057594037927937 (actor [36:56:2097]) rebooted! !Reboot 72057594037927937 (actor [36:56:2097]) tablet resolver refreshed! new actor is[36:81:2111] Leader for TabletID 72057594037927937 is [36:81:2111] sender: [36:135:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:57:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:74:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! 
Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:76:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:79:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:80:2057] recipient: [37:78:2110] Leader for TabletID 72057594037927937 is [37:81:2111] sender: [37:82:2057] recipient: [37:78:2110] !Reboot 72057594037927937 (actor [37:56:2097]) rebooted! !Reboot 72057594037927937 (actor [37:56:2097]) tablet resolver refreshed! new actor is[37:81:2111] Leader for TabletID 72057594037927937 is [37:81:2111] sender: [37:135:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:57:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:74:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:77:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:80:2057] recipient: [38:79:2110] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:81:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:82:2111] sender: [38:83:2057] recipient: [38:79:2110] !Reboot 72057594037927937 (actor [38:56:2097]) rebooted! !Reboot 72057594037927937 (actor [38:56:2097]) tablet resolver refreshed! new actor is[38:82:2111] Leader for TabletID 72057594037927937 is [38:82:2111] sender: [38:136:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:57:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:74:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:80:2057] recipient: [39:36:2083] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:83:2057] recipient: [39:82:2113] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:84:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:85:2114] sender: [39:86:2057] recipient: [39:82:2113] !Reboot 72057594037927937 (actor [39:56:2097]) rebooted! !Reboot 72057594037927937 (actor [39:56:2097]) tablet resolver refreshed! new actor is[39:85:2114] Leader for TabletID 72057594037927937 is [39:85:2114] sender: [39:139:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:52:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:57:2057] recipient: [40:52:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:74:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:80:2057] recipient: [40:36:2083] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:83:2057] recipient: [40:82:2113] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:84:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [40:85:2114] sender: [40:86:2057] recipient: [40:82:2113] !Reboot 72057594037927937 (actor [40:56:2097]) rebooted! !Reboot 72057594037927937 (actor [40:56:2097]) tablet resolver refreshed! new actor is[40:85:2114] Leader for TabletID 72057594037927937 is [40:85:2114] sender: [40:139:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:54:2057] recipient: [41:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:54:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:57:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:74:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:81:2057] recipient: [41:36:2083] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:84:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:85:2057] recipient: [41:83:2113] Leader for TabletID 72057594037927937 is [41:86:2114] sender: [41:87:2057] recipient: [41:83:2113] !Reboot 72057594037927937 (actor [41:56:2097]) rebooted! !Reboot 72057594037927937 (actor [41:56:2097]) tablet resolver refreshed! new actor is[41:86:2114] Leader for TabletID 72057594037927937 is [41:86:2114] sender: [41:140:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:54:2057] recipient: [42:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:54:2057] recipient: [42:51:2095] Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:57:2057] recipient: [42:51:2095] Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:74:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:84:2057] recipient: [42:36:2083] Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:87:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:88:2057] recipient: [42:86:2116] Leader for TabletID 72057594037927937 is [42:89:2117] sender: [42:90:2057] recipient: [42:86:2116] !Reboot 72057594037927937 (actor [42:56:2097]) rebooted! !Reboot 72057594037927937 (actor [42:56:2097]) tablet resolver refreshed! new actor is[42:89:2117] Leader for TabletID 72057594037927937 is [42:89:2117] sender: [42:143:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:54:2057] recipient: [43:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:54:2057] recipient: [43:51:2095] Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:57:2057] recipient: [43:51:2095] Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:74:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:84:2057] recipient: [43:36:2083] Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:87:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:88:2057] recipient: [43:86:2116] Leader for TabletID 72057594037927937 is [43:89:2117] sender: [43:90:2057] recipient: [43:86:2116] !Reboot 72057594037927937 (actor [43:56:2097]) rebooted! !Reboot 72057594037927937 (actor [43:56:2097]) tablet resolver refreshed! new actor is[43:89:2117] Leader for TabletID 72057594037927937 is [43:89:2117] sender: [43:143:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:54:2057] recipient: [44:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:54:2057] recipient: [44:52:2095] Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:57:2057] recipient: [44:52:2095] Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:74:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:85:2057] recipient: [44:36:2083] Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:88:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:89:2057] recipient: [44:87:2116] Leader for TabletID 72057594037927937 is [44:90:2117] sender: [44:91:2057] recipient: [44:87:2116] !Reboot 72057594037927937 (actor [44:56:2097]) rebooted! !Reboot 72057594037927937 (actor [44:56:2097]) tablet resolver refreshed! new actor is[44:90:2117] Leader for TabletID 72057594037927937 is [44:90:2117] sender: [44:144:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:54:2057] recipient: [45:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:54:2057] recipient: [45:51:2095] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:57:2057] recipient: [45:51:2095] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:74:2057] recipient: [45:14:2061] |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Worker::Basic |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile [GOOD] >> DataShardWrite::InsertImmediate |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk >> BasicStatistics::TwoTables |75.0%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |75.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> KqpBatchDelete::MultiStatement [GOOD] |75.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |75.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> 
TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:113:2057] recipient: [1:107:2139] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:113:2057] recipient: [1:107:2139] Leader for TabletID 9437184 is [1:126:2151] sender: [1:129:2057] recipient: [1:107:2139] 2025-04-17T17:10:20.849593Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:10:20.862195Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:10:20.862558Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T17:10:20.862762Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:10:20.934664Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:10:21.357860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:10:21.357935Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:21.383235Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:10:21.394598Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:10:21.422095Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T17:10:21.422539Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T17:10:21.422950Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T17:10:21.425976Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:10:21.426429Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:10:21.426645Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:195:2151] in generation 2 Leader for TabletID 9437184 is [1:126:2151] sender: [1:211:2057] recipient: [1:14:2061] 2025-04-17T17:10:21.682143Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:10:21.874835Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T17:10:21.875502Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:10:21.875904Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:216:2214] 2025-04-17T17:10:21.876138Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T17:10:21.876164Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T17:10:21.876196Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:10:21.877608Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:21.877799Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:21.879403Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T17:10:21.880264Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T17:10:21.880676Z node 1 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Execute at 9437184 2025-04-17T17:10:21.881261Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:10:21.881683Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T17:10:21.882068Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T17:10:21.882276Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T17:10:21.882691Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T17:10:21.882920Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:10:21.883972Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:212:2211], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:21.884424Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:21.884653Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:210:2210], serverId# [1:212:2211], sessionId# [0:0:0] 2025-04-17T17:10:21.898295Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T17:10:21.898511Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:10:21.899046Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:10:21.899946Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T17:10:21.900142Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T17:10:21.900761Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T17:10:21.901150Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:10:21.901553Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T17:10:21.902134Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T17:10:21.902363Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:10:21.904430Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T17:10:21.904628Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T17:10:21.904827Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T17:10:21.905025Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:10:21.905059Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T17:10:21.905252Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T17:10:21.905447Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T17:10:21.906002Z node 1 :TX_DATASHARD TRACE: 
Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T17:10:21.906193Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T17:10:21.926538Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T17:10:21.926768Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:10:21.926956Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:10:21.927164Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T17:10:21.927531Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T17:10:21.934738Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:222:2220], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:21.935124Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:21.935481Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:221:2219], serverId# [1:222:2220], sessionId# [0:0:0] 2025-04-17T17:10:21.939778Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-04-17T17:10:21.939810Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:10:21.939916Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-04-17T17:10:21.939959Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-17T17:10:21.939987Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-04-17T17:10:21.940011Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-04-17T17:10:21.942568Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-04-17T17:10:21.942610Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:10:21.942780Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:21.942809Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:21.942854Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:10:21.942883Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:10:21.942910Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T17:10:21.942937Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-04-17T17:10:21.942962Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-04-17T17:10:21.942990Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-17T17:10:21.943024Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-04-17T17:10:21.943054Z node 1 :TX_DATASHARD TRACE: 
Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T17:10:21.943076Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-17T17:10:21.943201Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-04-17T17:10:21.943228Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-17T17:10:21.943258Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T17:10:21.943274Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T17:10:21.943291Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T17:10:21.943330Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:10:21.943355Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T17:10:21.943382Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T17:10:21.943410Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T17:10:21.943442Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2025-04-17T17:10:21.943480Z node 1 :TX_DATASHARD TRAC ... :58.275578Z node 23 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:11:58.275676Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [23:281:2265], Recipient [23:235:2228]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [23:285:2269] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T17:11:58.275710Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T17:11:58.275797Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [23:123:2149], Recipient [23:235:2228]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-04-17T17:11:58.275829Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-17T17:11:58.275869Z node 23 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-04-17T17:11:58.275926Z node 23 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-04-17T17:11:58.290011Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [23:281:2265], Recipient [23:235:2228]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [23:281:2265] ServerId: [23:285:2269] } 2025-04-17T17:11:58.290080Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-17T17:11:58.329782Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [23:292:2274], Recipient [23:235:2228]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:58.329854Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:58.329906Z node 23 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [23:290:2273], serverId# [23:292:2274], sessionId# [0:0:0] 2025-04-17T17:11:58.330052Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 268830214, Sender [23:289:2272], Recipient [23:235:2228]: NKikimrTabletBase.TEvGetCounters 2025-04-17T17:11:58.365815Z node 23 :TX_DATASHARD TRACE: 
StateWork, received event# 269551617, Sender [23:99:2134], Recipient [23:235:2228]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 98784249942 } 2025-04-17T17:11:58.365893Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-17T17:11:58.366340Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [23:294:2276], Recipient [23:235:2228]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:58.366387Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:58.366446Z node 23 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [23:293:2275], serverId# [23:294:2276], sessionId# [0:0:0] 2025-04-17T17:11:58.366613Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [23:99:2134], Recipient [23:235:2228]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 98784249942 } TxBody: "\032\324\002\037\002\006Arg\005\205\n\205\000\205\004?\000\205\002\202\0047\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\004\01057$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020T\001\005?\026)\211\n?\024\206\203\004?\024? ?\024\203\004\020Fold\000)\211\002?\"\206? \034Collect\000)\211\006?(? \203\004\203\0024ListFromRange\000\003? \000\003?,\003\022z\003?.\004\007\010\000\n\003?\024\000)\251\000? \002\000\004)\251\000?\024\002\000\002)\211\006?$\203\005@? ?\024\030Invoke\000\003?F\006Add?@?D\001\006\002\014\000\007\016\000\003\005?\010?\014\006\002?\006?R\000\003?\014?\014\037/ \0018\000" TxId: 2 ExecLevel: 0 Flags: 0 2025-04-17T17:11:58.366651Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:11:58.366746Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:11:58.367575Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2025-04-17T17:11:58.367643Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-17T17:11:58.367684Z node 23 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2025-04-17T17:11:58.367723Z node 23 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T17:11:58.367757Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T17:11:58.367793Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:11:58.367854Z node 23 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184 2025-04-17T17:11:58.367890Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-17T17:11:58.367911Z node 23 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-17T17:11:58.367933Z node 23 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2025-04-17T17:11:58.367959Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-17T17:11:58.368003Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 
CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:11:58.368053Z node 23 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 requested 132374 more memory 2025-04-17T17:11:58.372198Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-17T17:11:58.372582Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:11:58.372646Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-17T17:11:58.372718Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:11:58.373624Z node 23 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 132502 and requests 1060016 more for the next try 2025-04-17T17:11:58.373788Z node 23 :TX_DATASHARD DEBUG: tx 2 released its data 2025-04-17T17:11:58.373841Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-17T17:11:58.374044Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:11:58.374073Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-17T17:11:58.374839Z node 23 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2025-04-17T17:11:58.374897Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:11:58.375333Z node 23 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 1192518 and requests 9540144 more for the next try 2025-04-17T17:11:58.375422Z node 23 :TX_DATASHARD DEBUG: tx 2 released its data 2025-04-17T17:11:58.375453Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-17T17:11:58.375590Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:11:58.375619Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-17T17:11:58.376090Z node 23 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2025-04-17T17:11:58.384266Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:11:58.384890Z node 23 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 10732662 and requests 85861296 more for the next try 2025-04-17T17:11:58.385024Z node 23 :TX_DATASHARD DEBUG: tx 2 released its data 2025-04-17T17:11:58.385064Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-17T17:11:58.385300Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:11:58.385331Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-17T17:11:58.385872Z node 23 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2025-04-17T17:11:58.385909Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:11:58.963035Z node 23 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2025-04-17T17:11:58.963134Z node 23 
:TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 8, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T17:11:58.963199Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:11:58.963232Z node 23 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2025-04-17T17:11:58.963272Z node 23 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2025-04-17T17:11:58.963313Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 2025-04-17T17:11:58.963395Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T17:11:58.963421Z node 23 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-04-17T17:11:58.963454Z node 23 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-04-17T17:11:58.963486Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-04-17T17:11:58.963525Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-17T17:11:58.963549Z node 23 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-04-17T17:11:58.963585Z node 23 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2025-04-17T17:11:58.986276Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T17:11:58.986590Z node 23 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-04-17T17:11:58.986868Z node 23 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-04-17T17:11:58.987487Z node 23 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:11:58.991949Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [23:299:2281], Recipient [23:235:2228]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:58.996562Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:58.997224Z node 23 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [23:298:2280], serverId# [23:299:2281], sessionId# [0:0:0] 2025-04-17T17:11:58.997981Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 268830214, Sender [23:297:2279], Recipient [23:235:2228]: NKikimrTabletBase.TEvGetCounters ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopTokenSelectCount 2025-04-17 17:11:43,264 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 17:11:43,457 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. 
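Back in the MemoryUsageImmediateHugeTx trace above, ExecuteDataTx was restarted with an escalating memory budget: 132502 bytes, then 1192518, then 10732662, each retry adding roughly eight times the current limit ("exceeded memory limit 132502 and requests 1060016 more for the next try"). The retry loop, as a hedged sketch using the log's own numbers (the `tx` callable and the escalation policy are illustrative; the real logic lives in the datashard execution units):

```python
def execute_with_escalation(tx, start_limit, max_limit=1 << 33):
    # Re-run a transaction with a growing memory budget until it fits.
    limit = start_limit
    while limit <= max_limit:
        ok, extra = tx(limit)   # tx returns (done, extra bytes it still wants)
        if ok:
            return limit
        limit += extra          # grant the requested escalation and restart
    raise MemoryError("transaction does not fit in %d bytes" % max_limit)

# Toy transaction that needs ~1 MiB: fails until the budget covers it,
# requesting 8x the current limit on each failure, as in the trace.
demo = lambda limit: (limit >= 1_000_000, 8 * limit)
assert execute_with_escalation(demo, start_limit=132502) == 1192518
```

Multiplicative escalation keeps the number of restarts logarithmic in the final budget, which is why the trace shows only a handful of Restart rounds before the operation completes with status COMPLETE.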
Process tree before termination: pid rss ref pdirt 21823 46.1M 46.1M 23.1M test_tool run_ut @/home/runner/.ya/build/build_root/6vg8/0025e9/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk3/testing_out_stuff/test 22435 1.4G 1.4G 957M └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/6vg8/0025e9/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unitt Test command err: Trying to start YDB, gRPC: 32049, MsgBus: 27826 2025-04-17T17:10:47.693870Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494330729856667855:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:10:47.693954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0025e9/r3tmp/tmp41EpCp/pdisk_1.dat 2025-04-17T17:10:48.140773Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:48.153340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:10:48.153423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:10:48.162620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32049, node 1 2025-04-17T17:10:48.256174Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:10:48.256198Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:10:48.256202Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:10:48.256277Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27826 TClient is connected to server localhost:27826 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:10:48.773145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:10:52.696994Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494330729856667855:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:10:52.697447Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:10:55.352872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330764216406910:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:55.352961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:56.293095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:10:56.664735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330768511374333:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:56.664777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:56.667913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330768511374338:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:10:56.690378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:10:56.836491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494330768511374340:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:10:56.941467Z node 1 :TX_PROXY ERROR: Actor# [1:7494330768511374405:2425] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:10:58.093162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:10:59.088830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480 2025-04-17T17:11:00.289596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T17:11:02.309666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T17:11:03.141085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:03.141371Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:03.659064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T17:11:04.302415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:11:04.347774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:11:06.984618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710704:0, at schemeshard: 72057594046644480 2025-04-17T17:11:07.003405Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2bptja98c3fbczne0ty9dh", SessionId: ydb://session/3?node_id=1&id=MzMxN2YxYWItOTI1MTIzYjctYjg3YmRkNjUtNDE3YjE5YWM=, Slow query, duration: 11.679256s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE OBJECT external_data_source_p (TYPE SECRET) WITH (value=qwerty12345);\n\n CREATE EXTERNAL DATA SOURCE external_data_source WITH (\n SOURCE_TYPE=\"Iceberg\",\n DATABASE_NAME=\"pgdb\",\n WAREHOUSE_TYPE=\"s3\",\n WAREHOUSE_S3_REGION=\"s3_region\",\n WAREHOUSE_S3_ENDPOINT=\"s3_endpoint\",\n WAREHOUSE_S3_URI=\"s3_uri\",\n \n AUTH_METHOD=\"TOKEN\",\n TOKEN_SECRET_NAME=\"external_data_source_p\"\n ,\n \n CATALOG_TYPE=\"hadoop\"\n ,\n USE_TLS=\"FALSE\"\n );\n ", parameters: 0b 2025-04-17T17:11:07.268744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 2025-04-17T17:11:07.278734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976710709:0, at schemeshard: 72057594046644480 2025-04-17T17:11:07.289422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710708:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { ... 
644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:11:30.156980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:11:30.661572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-17T17:11:31.474474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T17:11:32.649574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-17T17:11:33.737029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-17T17:11:34.588177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-17T17:11:34.678023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-17T17:11:37.632648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:37.632668Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:38.891422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715706:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. 
selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 Trying to start YDB, gRPC: 13028, MsgBus: 5606 2025-04-17T17:11:41.510587Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494330962643318413:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0025e9/r3tmp/tmpSUmQBb/pdisk_1.dat 2025-04-17T17:11:41.566122Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:11:41.669780Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:41.682821Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:41.682900Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:41.683991Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13028, node 3 2025-04-17T17:11:41.880787Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:41.880810Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:41.880817Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:41.880924Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5606 TClient is connected to server localhost:5606 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:11:42.863386Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:11:42.874764Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6vg8/0025e9/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk3/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6vg8/0025e9/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk3/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) >> ResourcePoolsDdl::TestCreateResourcePool [GOOD] >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless >> DataShardWrite::CancelImmediate [GOOD] >> DataShardWrite::DeletePrepared+Volatile >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] |75.1%| [TA] $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |75.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::MultiStatement [GOOD] Test command err: Trying to start YDB, gRPC: 29630, MsgBus: 24233 2025-04-17T17:11:38.766047Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494330948165440242:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:38.766501Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00245e/r3tmp/tmpDl9DAF/pdisk_1.dat 2025-04-17T17:11:41.056529Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:11:41.337304Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:41.353489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:41.356534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:41.394186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29630, node 1 2025-04-17T17:11:41.532658Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:41.532678Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:41.532686Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:41.532794Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24233 TClient is connected to server localhost:24233 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:11:42.595625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:11:42.616422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:43.195392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:43.586343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:43.779712Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494330948165440242:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:44.044904Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:11:44.184517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:55.715297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331021179886009:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:55.715534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:56.305933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:11:56.309783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:56.309798Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:56.348292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:11:56.384822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:11:56.417382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:11:56.450811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:11:56.527949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:11:56.581158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:11:56.657446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331025474853975:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:56.657537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:56.657935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331025474853980:2498], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:56.662168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:11:56.684678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331025474853982:2499], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:11:56.742075Z node 1 :TX_PROXY ERROR: Actor# [1:7494331025474854039:3597] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:11:58.233465Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494331034064788999:2555], status: GENERIC_ERROR, issues:
:4:32: Error: BATCH can't be used with multiple writes or reads. 2025-04-17T17:11:58.234711Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmFiZDZhNjUtMzRkY2ViN2QtOWIzZGIxYzctYmY4OWFiNw==, ActorId: [1:7494331034064788990:2549], ActorState: ExecuteState, TraceId: 01js2brqw8ey027rw9xx22xsvb, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T17:11:58.278024Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494331034064789014:2557], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2025-04-17T17:11:58.279178Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmFiZDZhNjUtMzRkY2ViN2QtOWIzZGIxYzctYmY4OWFiNw==, ActorId: [1:7494331034064788990:2549], ActorState: ExecuteState, TraceId: 01js2brr1k68bhg8gxvf5nra1s, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T17:11:58.305413Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494331034064789018:2559], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2025-04-17T17:11:58.306542Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmFiZDZhNjUtMzRkY2ViN2QtOWIzZGIxYzctYmY4OWFiNw==, ActorId: [1:7494331034064788990:2549], ActorState: ExecuteState, TraceId: 01js2brr2gdw58mfwvyxk5dzef, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T17:11:58.336979Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494331034064789030:2561], status: GENERIC_ERROR, issues:
:4:29: Error: BATCH can't be used with multiple writes or reads. 2025-04-17T17:11:58.338160Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmFiZDZhNjUtMzRkY2ViN2QtOWIzZGIxYzctYmY4OWFiNw==, ActorId: [1:7494331034064788990:2549], ActorState: ExecuteState, TraceId: 01js2brr3d1p31nvhpwx2kpfpw, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T17:11:58.355459Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494331034064789043:2563], status: GENERIC_ERROR, issues:
:3:29: Error: BATCH can't be used with multiple writes or reads. 2025-04-17T17:11:58.356822Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmFiZDZhNjUtMzRkY2ViN2QtOWIzZGIxYzctYmY4OWFiNw==, ActorId: [1:7494331034064788990:2549], ActorState: ExecuteState, TraceId: 01js2brr4a5hby138yys52rzmm, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> DataShardWrite::UpdateImmediate [GOOD] >> DataShardWrite::RejectOnChangeQueueOverflow |75.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock-UseSink |75.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |75.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |75.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |75.1%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |75.1%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:12:00.808452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:12:00.808967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:12:00.809635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:12:00.809894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:12:00.810134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:12:00.810576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:12:00.811076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:12:00.811725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:12:00.814713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:12:01.050738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:12:01.050807Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:01.078283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:12:01.078743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2025-04-17T17:12:01.078909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:12:01.098397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:12:01.098604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:12:01.099273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:01.099486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:12:01.102304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:01.103584Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:01.103641Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:01.103840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:12:01.103888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:01.103927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:12:01.104052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.121022Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:12:01.268062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:12:01.268356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.268554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:12:01.268784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:12:01.268868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.273027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:01.273180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:12:01.273401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.273488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:12:01.273522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:12:01.273557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:12:01.281148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.281225Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:12:01.281264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:12:01.287074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.287137Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.287183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:01.287237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:12:01.290211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:12:01.304065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:12:01.304320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:12:01.305257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:01.305443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:12:01.305493Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:01.305748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:12:01.305802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:01.305956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:12:01.306039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:12:01.310225Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-17T17:12:01.310305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:01.310533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:01.310581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:12:01.310933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.310989Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:12:01.311087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:12:01.311125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:01.311169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:12:01.311215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:01.311253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:12:01.311292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:01.311355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:12:01.311389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:12:01.311458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:12:01.311494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:12:01.311535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:12:01.320540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:12:01.320733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:12:01.320782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
-17T17:12:01.506734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.506788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.506838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.526565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:12:01.528002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:01.530581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:12:01.530654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:01.531099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:12:01.531151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:12:01.531233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:12:01.538429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.544127Z node 1 :FLAT_TX_SCHEMESHARD WARN: TTxCleanBlockStoreVolumes Complete, done PersistRemoveBlockStoreVolume for 1 volumes, left 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.544209Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:01.544270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:12:01.544748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:403:2379] sender: [1:469:2058] recipient: [1:15:2062] 2025-04-17T17:12:01.605106Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:12:01.605305Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 216us result status StatusPathDoesNotExist 2025-04-17T17:12:01.605460Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty 
ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:12:01.606358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:403:2379] sender: [1:470:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:403:2379] sender: [1:473:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:403:2379] sender: [1:474:2058] recipient: [1:472:2432] Leader for TabletID 72057594046678944 is [1:475:2433] sender: [1:476:2058] recipient: [1:472:2432] 2025-04-17T17:12:01.685163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:12:01.685256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:12:01.685313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:12:01.685365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:12:01.685414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:12:01.685444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:12:01.685505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:12:01.685569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:12:01.685841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:12:01.701865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:12:01.703077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:12:01.703234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:12:01.703331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:12:01.703361Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:01.703649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:12:01.704388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:01.704507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.704591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.704926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.704989Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-17T17:12:01.705142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at 
schemeshard: 72057594046678944 2025-04-17T17:12:01.705260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.705350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.705422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.705521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.705703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.705938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.706051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.706421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.706487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.706669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.706777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.706872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.707047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.707142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.707280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.707508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.707660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.707708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.707768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-17T17:12:01.720208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:01.720306Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:01.720572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:12:01.720638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:01.720719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:12:01.722283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is 
[1:475:2433] sender: [1:536:2058] recipient: [1:15:2062] 2025-04-17T17:12:01.760814Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:12:01.761321Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 487us result status StatusPathDoesNotExist 2025-04-17T17:12:01.761492Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TTxDataShardMiniKQL::CrossShard_3_AllToOne [GOOD] >> TTxDataShardMiniKQL::CrossShard_4_OneToAll >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink |75.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |75.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |75.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl >> KqpDataIntegrityTrails::BrokenReadLock+UseSink >> DataShardWrite::UpsertPreparedManyTables-Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache+Volatile >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink >> DataShardWrite::UpsertNoLocksArbiterRestart [GOOD] >> DataShardWrite::UpsertLostPrepareArbiterRestart >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] |75.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |75.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |75.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |75.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |75.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication >> SystemView::Nodes [GOOD] >> SystemView::ConcurrentScans >> TKeyValueTest::TestCleanUpDataWithMockDisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:11:57.848185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:11:57.848629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:11:57.848687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:11:57.848729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:11:57.848793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:11:57.848834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:11:57.848900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:11:57.848991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:11:57.849355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:11:58.113971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:11:58.114041Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:58.152038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:11:58.161790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:11:58.162021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:11:58.187275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:11:58.187751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:11:58.188432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:11:58.188821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:11:58.202584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:11:58.203937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:11:58.204009Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:11:58.204158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:11:58.204202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:11:58.204251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:11:58.204444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:11:58.220460Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:11:58.435205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:11:58.435427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:58.435648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:11:58.435863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:11:58.435914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:58.444216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:11:58.444355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:11:58.444564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:58.444645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:11:58.444702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:11:58.444738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:11:58.449689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:58.449767Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:11:58.449806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:11:58.461058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:58.461139Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:58.461181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:11:58.461252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:11:58.465066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:11:58.475620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:11:58.480832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, 
TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:11:58.493573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:11:58.494565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:11:58.495599Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:11:58.508449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:11:58.508543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:11:58.508726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:11:58.508820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:11:58.517121Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:11:58.517176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:11:58.517347Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:11:58.517954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:11:58.520297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:11:58.520566Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:11:58.521860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:11:58.522347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:11:58.522567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:11:58.523154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:11:58.523377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:11:58.523600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:11:58.532064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:11:58.532410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:11:58.533213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:11:58.533496Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:11:58.533786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:11:58.575355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:11:58.575538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:11:58.575590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 1/1 2025-04-17T17:12:02.889033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:12:02.897098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-17T17:12:02.897597Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 24 TabletID: 72075186233409569 2025-04-17T17:12:02.898194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 24 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2025-04-17T17:12:02.898499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 4 2025-04-17T17:12:02.898734Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 23 TabletID: 72075186233409568 Forgetting tablet 72075186233409569 2025-04-17T17:12:02.899187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 Forgetting tablet 72075186233409568 2025-04-17T17:12:02.899353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 23 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2025-04-17T17:12:02.899551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 3 2025-04-17T17:12:02.908555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2025-04-17T17:12:02.908724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000028 2025-04-17T17:12:02.909183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000028, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:02.909319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: 
Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000028 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:12:02.909370Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropBlockStoreVolume TPropose, operationId: 129:0 HandleReply TEvOperationPlan, step: 5000028, at schemeshard: 72057594046678944 2025-04-17T17:12:02.909483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2025-04-17T17:12:02.909629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-04-17T17:12:02.909669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-17T17:12:02.909703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-04-17T17:12:02.909744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-17T17:12:02.909806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:12:02.909871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-04-17T17:12:02.909906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2025-04-17T17:12:02.909964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-17T17:12:02.910003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 129:0 2025-04-17T17:12:02.910029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 129:0 2025-04-17T17:12:02.910148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2025-04-17T17:12:02.910183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2025-04-17T17:12:02.910222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 54 2025-04-17T17:12:02.910253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 13], 18446744073709551615 2025-04-17T17:12:02.911309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-04-17T17:12:02.911357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-04-17T17:12:02.911470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2025-04-17T17:12:02.911508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-04-17T17:12:02.914025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:24 hive 72057594037968897 at ss 72057594046678944 2025-04-17T17:12:02.914067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:23 hive 72057594037968897 at ss 72057594046678944 2025-04-17T17:12:02.914247Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:02.914301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:02.914490Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 13] 2025-04-17T17:12:02.914630Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:02.914672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-04-17T17:12:02.914704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 129, path id: 13 FAKE_COORDINATOR: Erasing txId 129 2025-04-17T17:12:02.915207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2025-04-17T17:12:02.915275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2025-04-17T17:12:02.915303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2025-04-17T17:12:02.915340Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 13], version: 18446744073709551615 2025-04-17T17:12:02.915391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-04-17T17:12:02.915733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:12:02.915768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 13], at schemeshard: 72057594046678944 2025-04-17T17:12:02.915857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:12:02.916149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2025-04-17T17:12:02.916206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2025-04-17T17:12:02.916236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2025-04-17T17:12:02.916271Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 54 2025-04-17T17:12:02.916303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:12:02.916371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2025-04-17T17:12:02.916551Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 24 
2025-04-17T17:12:02.916665Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 23 2025-04-17T17:12:02.916927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 24 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2025-04-17T17:12:02.917410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 23 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2025-04-17T17:12:02.928303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-17T17:12:02.931052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T17:12:02.931187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-17T17:12:02.936767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-04-17T17:12:02.937392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 TestModificationResult got TxId: 129, wait until txId: 129 TestWaitNotification wait txId: 129 2025-04-17T17:12:02.938011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: send EvNotifyTxCompletion 2025-04-17T17:12:02.938057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 129 2025-04-17T17:12:02.938824Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2025-04-17T17:12:02.938921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2025-04-17T17:12:02.938955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:1686:3556] TestWaitNotification: OK eventTxId 129 |75.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |75.2%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |75.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |75.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |75.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |75.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCleanUpDataWithMockDisk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: 
[2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvCleanUpDataRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvForceTabletDataCleanup ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:82:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:86:2057] recipient: [7:82:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:85:2114] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! 
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:89:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:90:2118] sender: [8:91:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:90:2118] Leader for TabletID 72057594037927937 is [8:90:2118] sender: [8:144:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:89:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:92:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:93:2057] recipient: [9:91:2121] Leader for TabletID 72057594037927937 is [9:94:2122] sender: [9:95:2057] recipient: [9:91:2121] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:94:2122] Leader for TabletID 72057594037927937 is [9:94:2122] sender: [9:148:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvCleanUpDataRequest ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:92:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:93:2057] recipient: [10:91:2121] Leader for TabletID 72057594037927937 is [10:94:2122] sender: [10:95:2057] recipient: [10:91:2121] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:94:2122] Leader for TabletID 72057594037927937 is [10:94:2122] sender: [10:148:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:94:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:95:2057] recipient: [11:93:2123] Leader for TabletID 72057594037927937 is [11:96:2124] sender: [11:97:2057] recipient: [11:93:2123] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:96:2124] Leader for TabletID 72057594037927937 is [11:96:2124] sender: [11:150:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:1 ... 57594037927937 is [35:99:2125] sender: [35:153:2057] recipient: [35:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:54:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:54:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:17:2064] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:95:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:98:2057] recipient: [36:17:2064] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:99:2057] recipient: [36:97:2124] Leader for TabletID 72057594037927937 is [36:100:2125] sender: [36:101:2057] recipient: [36:97:2124] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:100:2125] Leader for TabletID 72057594037927937 is [36:100:2125] sender: [36:154:2057] recipient: [36:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:52:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:52:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:17:2064] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:98:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:101:2057] recipient: [37:17:2064] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:102:2057] recipient: [37:100:2127] Leader for TabletID 72057594037927937 is [37:103:2128] sender: [37:104:2057] recipient: [37:100:2127] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! 
new actor is[37:103:2128] Leader for TabletID 72057594037927937 is [37:103:2128] sender: [37:157:2057] recipient: [37:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:54:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:54:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:17:2064] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:98:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:101:2057] recipient: [38:17:2064] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:102:2057] recipient: [38:100:2127] Leader for TabletID 72057594037927937 is [38:103:2128] sender: [38:104:2057] recipient: [38:100:2127] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! new actor is[38:103:2128] Leader for TabletID 72057594037927937 is [38:103:2128] sender: [38:157:2057] recipient: [38:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:17:2064] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:99:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:102:2057] recipient: [39:17:2064] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:103:2057] recipient: [39:101:2127] Leader for TabletID 72057594037927937 is [39:104:2128] sender: [39:105:2057] recipient: [39:101:2127] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:104:2128] Leader for TabletID 72057594037927937 is [39:104:2128] sender: [39:158:2057] recipient: [39:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:17:2064] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:100:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:103:2057] recipient: [40:17:2064] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:104:2057] recipient: [40:102:2128] Leader for TabletID 72057594037927937 is [40:105:2129] sender: [40:106:2057] recipient: [40:102:2128] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! 
new actor is[40:105:2129] Leader for TabletID 72057594037927937 is [40:105:2129] sender: [40:125:2057] recipient: [40:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:17:2064] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:101:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:103:2057] recipient: [41:17:2064] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:105:2057] recipient: [41:104:2129] Leader for TabletID 72057594037927937 is [41:106:2130] sender: [41:107:2057] recipient: [41:104:2129] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! new actor is[41:106:2130] Leader for TabletID 72057594037927937 is [41:106:2130] sender: [41:126:2057] recipient: [41:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:17:2064] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:104:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:107:2057] recipient: [42:17:2064] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:108:2057] recipient: [42:106:2132] Leader for TabletID 72057594037927937 is [42:109:2133] sender: [42:110:2057] recipient: [42:106:2132] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! new actor is[42:109:2133] Leader for TabletID 72057594037927937 is [42:109:2133] sender: [42:163:2057] recipient: [42:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:17:2064] !Reboot 72057594037927937 (actor [43:58:2099]) on event NKikimr::TEvKeyValue::TEvCleanUpDataRequest ! Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:104:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:107:2057] recipient: [43:17:2064] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:108:2057] recipient: [43:106:2132] Leader for TabletID 72057594037927937 is [43:109:2133] sender: [43:110:2057] recipient: [43:106:2132] !Reboot 72057594037927937 (actor [43:58:2099]) rebooted! !Reboot 72057594037927937 (actor [43:58:2099]) tablet resolver refreshed! 
new actor is[43:109:2133] Leader for TabletID 72057594037927937 is [43:109:2133] sender: [43:163:2057] recipient: [43:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:59:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:76:2057] recipient: [44:17:2064] !Reboot 72057594037927937 (actor [44:58:2099]) on event NKikimr::TEvKeyValue::TEvForceTabletDataCleanup ! Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:104:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:107:2057] recipient: [44:17:2064] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:108:2057] recipient: [44:106:2132] Leader for TabletID 72057594037927937 is [44:109:2133] sender: [44:110:2057] recipient: [44:106:2132] !Reboot 72057594037927937 (actor [44:58:2099]) rebooted! !Reboot 72057594037927937 (actor [44:58:2099]) tablet resolver refreshed! new actor is[44:109:2133] Leader for TabletID 72057594037927937 is [44:109:2133] sender: [44:163:2057] recipient: [44:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:59:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:76:2057] recipient: [45:17:2064] !Reboot 72057594037927937 (actor [45:58:2099]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:109:2057] recipient: [45:39:2086] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:112:2057] recipient: [45:17:2064] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:113:2057] recipient: [45:111:2136] Leader for TabletID 72057594037927937 is [45:114:2137] sender: [45:115:2057] recipient: [45:111:2136] !Reboot 72057594037927937 (actor [45:58:2099]) rebooted! !Reboot 72057594037927937 (actor [45:58:2099]) tablet resolver refreshed! new actor is[45:114:2137] Leader for TabletID 72057594037927937 is [45:114:2137] sender: [45:168:2057] recipient: [45:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:59:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:76:2057] recipient: [46:17:2064] >> TopicAutoscaling::PartitionSplit_PQv1 >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed >> ShowCreateView::WithSingleQuotedTablePathPrefix [FAIL] >> ShowCreateView::WithTwoTablePathPrefixes |75.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK >> TopicAutoscaling::CommitTopPast >> TExportToS3Tests::RebootDuringCompletion >> Balancing::Balancing_OneTopic_TopicApi >> TExportToS3Tests::ShouldSucceedOnMultiShardTable >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings >> DataShardWrite::DeletePrepared+Volatile [GOOD] >> DataShardWrite::DeletePrepared-Volatile >> TopicAutoscaling::Simple_AutoscaleAwareSDK >> DataShardWrite::InsertImmediate [GOOD] >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace >> DataShardWrite::RejectOnChangeQueueOverflow [GOOD] >> DataShardWrite::UpsertBrokenLockArbiter >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK >> TExportToS3Tests::DropSourceTableBeforeTransferring >> DataShardWrite::UpsertPreparedNoTxCache+Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile >> SystemView::AuthGroups_Access [GOOD] >> SystemView::AuthGroups_ResultOrder >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK >> DbCounters::TabletsSimple [GOOD] >> LabeledDbCounters::OneTablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] Test command err: 2025-04-17T17:11:23.967978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:11:23.968524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:11:23.968582Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00176c/r3tmp/tmpfnqILp/pdisk_1.dat 2025-04-17T17:11:24.886133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:11:25.207192Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:25.324772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:25.325499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:25.338474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:11:25.449247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:11:25.510772Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:11:25.511810Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:11:25.524382Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:11:25.524722Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:11:25.686059Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:11:25.692518Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:11:25.692844Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:11:25.750165Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:11:25.750274Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:11:25.750325Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:11:25.751089Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:11:25.751477Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:11:25.751875Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:11:25.767493Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:11:26.051784Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:11:26.051982Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:11:26.052091Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:11:26.052138Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:11:26.052171Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:11:26.052213Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:11:26.052424Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:11:26.052464Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:11:26.052712Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:11:26.052788Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:11:26.052870Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:11:26.052907Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:11:26.052944Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:11:26.052983Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:11:26.053015Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:11:26.053042Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:11:26.053077Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:11:26.053184Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:26.053222Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:26.053273Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:11:26.053607Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:11:26.053645Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:11:26.053731Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:11:26.053923Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:11:26.053965Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:11:26.054046Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:11:26.054085Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:11:26.054119Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:11:26.054160Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:11:26.054185Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:11:26.054407Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:11:26.054435Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:11:26.054475Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:11:26.054501Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:11:26.054536Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:11:26.054559Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:11:26.054594Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:11:26.054620Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:11:26.054641Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:11:26.055739Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:11:26.055787Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:11:26.069988Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:11:26.070255Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:11:26.071466Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:11:26.071761Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:11:26.072249Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:11:26.310104Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:26.310156Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:26.310441Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:11:26.321876Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:11:26.322417Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:11:26.323531Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:11:26.324087Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:11:26.332664Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:11:26.334447Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:11:26.425368Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:11:26.425440Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:11:26.439541Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:11:26.439588Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:11:26.439892Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:11:2 ... chemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-17T17:12:17.000037Z node 6 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[6:1003:2819], 1001} after executionsCount# 1 2025-04-17T17:12:17.000066Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:1003:2819], 1001} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:12:17.006783Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:1003:2819], 1001} finished in read 2025-04-17T17:12:17.006843Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-17T17:12:17.006867Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit ExecuteRead 2025-04-17T17:12:17.006887Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit CompletedOperations 2025-04-17T17:12:17.006909Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CompletedOperations 2025-04-17T17:12:17.006950Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-17T17:12:17.006972Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CompletedOperations 2025-04-17T17:12:17.006994Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037889 has finished 2025-04-17T17:12:17.007015Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-17T17:12:17.007083Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-17T17:12:17.008237Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1009:2825], Recipient [6:719:2597]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:17.008265Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:17.008293Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:1008:2824], serverId# [6:1009:2825], sessionId# [0:0:0] 2025-04-17T17:12:17.008370Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender 
[6:1007:2823], Recipient [6:719:2597]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-17T17:12:17.009094Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1012:2828], Recipient [6:719:2597]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:17.009122Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:17.009148Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:1011:2827], serverId# [6:1012:2828], sessionId# [0:0:0] 2025-04-17T17:12:17.009237Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:1010:2826], Recipient [6:719:2597]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-17T17:12:17.009302Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-04-17T17:12:17.009327Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1001/1000001 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:12:17.009350Z node 6 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to non-repeatable v4000/18446744073709551615 2025-04-17T17:12:17.009382Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit CheckRead 2025-04-17T17:12:17.009423Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-04-17T17:12:17.009442Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit CheckRead 2025-04-17T17:12:17.009461Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-04-17T17:12:17.009479Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit BuildAndWaitDependencies 2025-04-17T17:12:17.009509Z node 6 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037890 2025-04-17T17:12:17.009530Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-04-17T17:12:17.009547Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-04-17T17:12:17.009562Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit ExecuteRead 2025-04-17T17:12:17.009577Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit ExecuteRead 2025-04-17T17:12:17.009621Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-17T17:12:17.009700Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[6:1010:2826], 1002} after executionsCount# 1 2025-04-17T17:12:17.009725Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:1010:2826], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:12:17.009783Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:1010:2826], 1002} finished in read 2025-04-17T17:12:17.009811Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-04-17T17:12:17.009829Z node 6 :TX_DATASHARD 
TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit ExecuteRead 2025-04-17T17:12:17.009845Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit CompletedOperations 2025-04-17T17:12:17.009863Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit CompletedOperations 2025-04-17T17:12:17.009888Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-04-17T17:12:17.009908Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit CompletedOperations 2025-04-17T17:12:17.009925Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037890 has finished 2025-04-17T17:12:17.009943Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-04-17T17:12:17.009986Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-04-17T17:12:17.011447Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1016:2832], Recipient [6:716:2595]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:17.011476Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:17.011501Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:1015:2831], serverId# [6:1016:2832], sessionId# [0:0:0] 2025-04-17T17:12:17.011545Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:1014:2830], Recipient [6:716:2595]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-17T17:12:17.022063Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1019:2835], Recipient [6:716:2595]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:17.022103Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:17.022134Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:1018:2834], serverId# [6:1019:2835], sessionId# [0:0:0] 2025-04-17T17:12:17.022552Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:1017:2833], Recipient [6:716:2595]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-17T17:12:17.022650Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-04-17T17:12:17.033243Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:12:17.033306Z node 6 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v4000/18446744073709551615 2025-04-17T17:12:17.033354Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit CheckRead 2025-04-17T17:12:17.033425Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-04-17T17:12:17.033454Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit CheckRead 2025-04-17T17:12:17.033481Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-04-17T17:12:17.033504Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit 
BuildAndWaitDependencies 2025-04-17T17:12:17.033548Z node 6 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037891 2025-04-17T17:12:17.033577Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-04-17T17:12:17.033598Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-04-17T17:12:17.033616Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit ExecuteRead 2025-04-17T17:12:17.033632Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit ExecuteRead 2025-04-17T17:12:17.033711Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-17T17:12:17.034113Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[6:1017:2833], 1003} after executionsCount# 1 2025-04-17T17:12:17.034149Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:1017:2833], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:12:17.034196Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:1017:2833], 1003} finished in read 2025-04-17T17:12:17.034232Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-04-17T17:12:17.034252Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit ExecuteRead 2025-04-17T17:12:17.034282Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit CompletedOperations 2025-04-17T17:12:17.034300Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit CompletedOperations 2025-04-17T17:12:17.034335Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-04-17T17:12:17.034353Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit CompletedOperations 2025-04-17T17:12:17.034374Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037891 has finished 2025-04-17T17:12:17.034396Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-04-17T17:12:17.034467Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace [GOOD] >> DataShardWrite::PreparedDistributedWritePageFault >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK >> TExportToS3Tests::RebootDuringCompletion [GOOD] >> TExportToS3Tests::RebootDuringAbortion >> TestProgram::CountWithNulls >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] Test command err: 2025-04-17T17:11:25.310819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:11:25.311392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:11:25.311453Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001684/r3tmp/tmpsZ5x67/pdisk_1.dat 2025-04-17T17:11:26.539825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:11:26.692756Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:26.795805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:26.805061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:26.835069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:11:26.994014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:11:27.485237Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:11:27.510550Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:11:27.524506Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:11:27.534369Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:11:27.791744Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:11:27.794675Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:11:27.794807Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:11:27.796358Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:11:27.796444Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:11:27.796502Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:11:27.796842Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:11:27.796964Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:11:27.797050Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:11:27.807783Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:11:27.836027Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:11:27.836842Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:11:27.837003Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:11:27.837051Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:11:27.837094Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:11:27.837154Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:11:27.837390Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:11:27.837435Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:11:27.837691Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:11:27.837906Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:11:27.838032Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:11:27.838076Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:11:27.838125Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:11:27.838157Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:11:27.838190Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:11:27.838222Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:11:27.838260Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:11:27.838397Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:27.838429Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:27.838478Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:11:27.838867Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:11:27.838929Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:11:27.839042Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:11:27.839268Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:11:27.839331Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:11:27.839407Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:11:27.839458Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:11:27.839493Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:11:27.839534Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:11:27.839580Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:11:27.839829Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:11:27.839862Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:11:27.839897Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:11:27.839926Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:11:27.839970Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:11:27.839998Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:11:27.840062Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:11:27.840091Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:11:27.840135Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:11:27.841463Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:11:27.841508Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:11:27.852701Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:11:27.852771Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:11:27.852805Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:11:27.852853Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:11:27.852946Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:11:28.037490Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:28.037729Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:28.038138Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:11:28.040661Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:11:28.041062Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:11:28.042155Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:11:28.042606Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:11:28.043030Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:11:28.043258Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:11:28.063984Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:11:28.064492Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:11:28.070769Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:11:28.070813Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:11:28.071215Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:11:2 ... 6173Z node 6 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 4) 2025-04-17T17:12:23.196203Z node 6 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2025-04-17T17:12:23.196484Z node 6 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 loaded writeOp from db 1500:100 keys extracted: 3 2025-04-17T17:12:23.197420Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-17T17:12:23.197457Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit LoadWriteDetails 2025-04-17T17:12:23.197732Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:12:23.197765Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:12:23.198092Z node 6 :TX_DATASHARD TRACE: Operation [1500:100] is the new logically complete end at 72075186224037888 2025-04-17T17:12:23.198390Z node 6 :TX_DATASHARD TRACE: Operation [1500:100] is the new logically incomplete end at 72075186224037888 2025-04-17T17:12:23.198677Z node 6 :TX_DATASHARD TRACE: Activated operation [1500:100] at 72075186224037888 2025-04-17T17:12:23.198973Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-17T17:12:23.199002Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:12:23.199044Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit BuildWriteOutRS 2025-04-17T17:12:23.199077Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit BuildWriteOutRS 2025-04-17T17:12:23.199134Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-17T17:12:23.199156Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit BuildWriteOutRS 2025-04-17T17:12:23.199177Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2025-04-17T17:12:23.199198Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit 
StoreAndSendWriteOutRS 2025-04-17T17:12:23.199220Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-17T17:12:23.199242Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2025-04-17T17:12:23.199262Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit PrepareWriteTxInRS 2025-04-17T17:12:23.199280Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit PrepareWriteTxInRS 2025-04-17T17:12:23.199316Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-17T17:12:23.199337Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit PrepareWriteTxInRS 2025-04-17T17:12:23.199358Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit LoadAndWaitInRS 2025-04-17T17:12:23.199382Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit LoadAndWaitInRS 2025-04-17T17:12:23.199404Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-17T17:12:23.199424Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit LoadAndWaitInRS 2025-04-17T17:12:23.199444Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit ExecuteWrite 2025-04-17T17:12:23.199472Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit ExecuteWrite 2025-04-17T17:12:23.199499Z node 6 :TX_DATASHARD DEBUG: Executing write operation for [1500:100] at 72075186224037888 2025-04-17T17:12:23.199673Z node 6 :TX_DATASHARD DEBUG: Executed write operation for [1500:100] at 72075186224037888, row count=3 2025-04-17T17:12:23.199740Z node 6 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-17T17:12:23.199802Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:12:23.199829Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit ExecuteWrite 2025-04-17T17:12:23.199872Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit CompleteWrite 2025-04-17T17:12:23.199920Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit CompleteWrite 2025-04-17T17:12:23.204003Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is DelayComplete 2025-04-17T17:12:23.204075Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit CompleteWrite 2025-04-17T17:12:23.204361Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:12:23.204619Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:12:23.204659Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-17T17:12:23.204681Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:12:23.204923Z node 6 :TX_DATASHARD TRACE: Execution plan for [1500:100] at 72075186224037888 has finished 2025-04-17T17:12:23.205253Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-17T17:12:23.205669Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:12:23.206123Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:12:23.206291Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:12:23.221539Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-17T17:12:23.222488Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:12:23.222927Z node 6 :TX_DATASHARD TRACE: Complete execution for [1500:100] at 72075186224037888 on unit CompleteWrite 2025-04-17T17:12:23.223323Z node 6 :TX_DATASHARD DEBUG: Complete write [1500 : 100] from 72075186224037888 at tablet 72075186224037888 send result to client [6:593:2518] 2025-04-17T17:12:23.223978Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:12:23.238779Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:767:2637], Recipient [6:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:23.239146Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:23.239787Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:766:2636], serverId# [6:767:2637], sessionId# [0:0:0] 2025-04-17T17:12:23.240916Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:765:2635], Recipient [6:666:2570]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-17T17:12:23.248455Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:770:2640], Recipient [6:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:23.249277Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:23.249647Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:769:2639], serverId# [6:770:2640], sessionId# [0:0:0] 2025-04-17T17:12:23.251014Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:768:2638], Recipient [6:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-17T17:12:23.251429Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:12:23.251776Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/100 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:12:23.260286Z node 6 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v1500/18446744073709551615 2025-04-17T17:12:23.261312Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-04-17T17:12:23.262401Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T17:12:23.262712Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-04-17T17:12:23.263602Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:12:23.263941Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit 
BuildAndWaitDependencies 2025-04-17T17:12:23.264011Z node 6 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-04-17T17:12:23.264343Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T17:12:23.264372Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:12:23.264395Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T17:12:23.264424Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:12:23.265421Z node 6 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-17T17:12:23.269397Z node 6 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[6:768:2638], 1000} after executionsCount# 1 2025-04-17T17:12:23.269780Z node 6 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[6:768:2638], 1000} sends rowCount# 3, bytes# 96, quota rows left# 18446744073709551612, quota bytes left# 18446744073709551519, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:12:23.270505Z node 6 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[6:768:2638], 1000} finished in read 2025-04-17T17:12:23.270846Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T17:12:23.270872Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:12:23.271169Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:12:23.271194Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:12:23.271530Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T17:12:23.271815Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:12:23.272100Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-04-17T17:12:23.272440Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T17:12:23.273752Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> TestProgram::CountWithNulls [GOOD] >> TExportToS3Tests::ShouldSucceedOnMultiShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnManyTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountWithNulls [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, 
label="N2(15):{\"i\":\"2\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10001\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N0(2):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N2[shape=box, label="N1(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N3(15):{\"i\":\"10001\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N1->N2->N0->N3[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"10001","t":"Projection"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"2","p":{"kernel":{"class_name":"SIMPLE"}},"o":"10001","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10UInt64TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! 
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:86:2116] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:90:2057] recipient: [8:86:2116] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:89:2117] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:143:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:86:2116] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:90:2057] recipient: [9:86:2116] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:89:2117] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:87:2116] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:91:2057] recipient: [10:87:2116] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:90:2117] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:92:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:94:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:93:2120] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:147:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 6:2097]) rebooted! !Reboot 72057594037927937 (actor [44:56:2097]) tablet resolver refreshed! new actor is[44:101:2126] Leader for TabletID 72057594037927937 is [44:101:2126] sender: [44:155:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:54:2057] recipient: [45:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:54:2057] recipient: [45:51:2095] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:57:2057] recipient: [45:51:2095] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:74:2057] recipient: [45:14:2061] !Reboot 72057594037927937 (actor [45:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:97:2057] recipient: [45:36:2083] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:100:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:101:2057] recipient: [45:99:2125] Leader for TabletID 72057594037927937 is [45:102:2126] sender: [45:103:2057] recipient: [45:99:2125] !Reboot 72057594037927937 (actor [45:56:2097]) rebooted! !Reboot 72057594037927937 (actor [45:56:2097]) tablet resolver refreshed! new actor is[45:102:2126] Leader for TabletID 72057594037927937 is [45:102:2126] sender: [45:120:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:54:2057] recipient: [46:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:54:2057] recipient: [46:51:2095] Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:57:2057] recipient: [46:51:2095] Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:74:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:99:2057] recipient: [46:36:2083] Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:102:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:103:2057] recipient: [46:101:2127] Leader for TabletID 72057594037927937 is [46:104:2128] sender: [46:105:2057] recipient: [46:101:2127] !Reboot 72057594037927937 (actor [46:56:2097]) rebooted! !Reboot 72057594037927937 (actor [46:56:2097]) tablet resolver refreshed! new actor is[46:104:2128] Leader for TabletID 72057594037927937 is [46:104:2128] sender: [46:158:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:54:2057] recipient: [47:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:54:2057] recipient: [47:51:2095] Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:57:2057] recipient: [47:51:2095] Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:74:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! 
Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:99:2057] recipient: [47:36:2083] Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:102:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:103:2057] recipient: [47:101:2127] Leader for TabletID 72057594037927937 is [47:104:2128] sender: [47:105:2057] recipient: [47:101:2127] !Reboot 72057594037927937 (actor [47:56:2097]) rebooted! !Reboot 72057594037927937 (actor [47:56:2097]) tablet resolver refreshed! new actor is[47:104:2128] Leader for TabletID 72057594037927937 is [47:104:2128] sender: [47:158:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:54:2057] recipient: [48:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:54:2057] recipient: [48:51:2095] Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:57:2057] recipient: [48:51:2095] Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:74:2057] recipient: [48:14:2061] !Reboot 72057594037927937 (actor [48:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:100:2057] recipient: [48:36:2083] Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:103:2057] recipient: [48:14:2061] Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:104:2057] recipient: [48:102:2127] Leader for TabletID 72057594037927937 is [48:105:2128] sender: [48:106:2057] recipient: [48:102:2127] !Reboot 72057594037927937 (actor [48:56:2097]) rebooted! !Reboot 72057594037927937 (actor [48:56:2097]) tablet resolver refreshed! new actor is[48:105:2128] Leader for TabletID 72057594037927937 is [48:105:2128] sender: [48:123:2057] recipient: [48:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:54:2057] recipient: [49:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:54:2057] recipient: [49:50:2095] Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:57:2057] recipient: [49:50:2095] Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:74:2057] recipient: [49:14:2061] !Reboot 72057594037927937 (actor [49:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:102:2057] recipient: [49:36:2083] Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:105:2057] recipient: [49:14:2061] Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:106:2057] recipient: [49:104:2129] Leader for TabletID 72057594037927937 is [49:107:2130] sender: [49:108:2057] recipient: [49:104:2129] !Reboot 72057594037927937 (actor [49:56:2097]) rebooted! !Reboot 72057594037927937 (actor [49:56:2097]) tablet resolver refreshed! new actor is[49:107:2130] Leader for TabletID 72057594037927937 is [49:107:2130] sender: [49:161:2057] recipient: [49:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:54:2057] recipient: [50:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:54:2057] recipient: [50:52:2095] Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:57:2057] recipient: [50:52:2095] Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:74:2057] recipient: [50:14:2061] !Reboot 72057594037927937 (actor [50:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! 
Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:102:2057] recipient: [50:36:2083] Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:105:2057] recipient: [50:14:2061] Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:106:2057] recipient: [50:104:2129] Leader for TabletID 72057594037927937 is [50:107:2130] sender: [50:108:2057] recipient: [50:104:2129] !Reboot 72057594037927937 (actor [50:56:2097]) rebooted! !Reboot 72057594037927937 (actor [50:56:2097]) tablet resolver refreshed! new actor is[50:107:2130] Leader for TabletID 72057594037927937 is [50:107:2130] sender: [50:161:2057] recipient: [50:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:54:2057] recipient: [51:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:54:2057] recipient: [51:51:2095] Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:57:2057] recipient: [51:51:2095] Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:74:2057] recipient: [51:14:2061] !Reboot 72057594037927937 (actor [51:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:103:2057] recipient: [51:36:2083] Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:105:2057] recipient: [51:14:2061] Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:107:2057] recipient: [51:106:2129] Leader for TabletID 72057594037927937 is [51:108:2130] sender: [51:109:2057] recipient: [51:106:2129] !Reboot 72057594037927937 (actor [51:56:2097]) rebooted! !Reboot 72057594037927937 (actor [51:56:2097]) tablet resolver refreshed! new actor is[51:108:2130] Leader for TabletID 72057594037927937 is [51:108:2130] sender: [51:126:2057] recipient: [51:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:54:2057] recipient: [52:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:54:2057] recipient: [52:52:2095] Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:57:2057] recipient: [52:52:2095] Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:74:2057] recipient: [52:14:2061] !Reboot 72057594037927937 (actor [52:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:105:2057] recipient: [52:36:2083] Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:108:2057] recipient: [52:14:2061] Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:109:2057] recipient: [52:107:2131] Leader for TabletID 72057594037927937 is [52:110:2132] sender: [52:111:2057] recipient: [52:107:2131] !Reboot 72057594037927937 (actor [52:56:2097]) rebooted! !Reboot 72057594037927937 (actor [52:56:2097]) tablet resolver refreshed! new actor is[52:110:2132] Leader for TabletID 72057594037927937 is [52:110:2132] sender: [52:164:2057] recipient: [52:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:54:2057] recipient: [53:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:54:2057] recipient: [53:51:2095] Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:57:2057] recipient: [53:51:2095] Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:74:2057] recipient: [53:14:2061] !Reboot 72057594037927937 (actor [53:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! 
Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:105:2057] recipient: [53:36:2083] Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:108:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:109:2057] recipient: [53:107:2131] Leader for TabletID 72057594037927937 is [53:110:2132] sender: [53:111:2057] recipient: [53:107:2131] !Reboot 72057594037927937 (actor [53:56:2097]) rebooted! !Reboot 72057594037927937 (actor [53:56:2097]) tablet resolver refreshed! new actor is[53:110:2132] Leader for TabletID 72057594037927937 is [53:110:2132] sender: [53:164:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:54:2057] recipient: [54:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:54:2057] recipient: [54:50:2095] Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:57:2057] recipient: [54:50:2095] Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:74:2057] recipient: [54:14:2061] !Reboot 72057594037927937 (actor [54:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:106:2057] recipient: [54:36:2083] Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:109:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:110:2057] recipient: [54:108:2131] Leader for TabletID 72057594037927937 is [54:111:2132] sender: [54:112:2057] recipient: [54:108:2131] !Reboot 72057594037927937 (actor [54:56:2097]) rebooted! !Reboot 72057594037927937 (actor [54:56:2097]) tablet resolver refreshed! new actor is[54:111:2132] Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:54:2057] recipient: [55:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:54:2057] recipient: [55:51:2095] Leader for TabletID 72057594037927937 is [55:56:2097] sender: [55:57:2057] recipient: [55:51:2095] Leader for TabletID 72057594037927937 is [55:56:2097] sender: [55:74:2057] recipient: [55:14:2061] >> DataShardWrite::DeletePrepared-Volatile [GOOD] >> DataShardWrite::DelayedVolatileTxAndEvWrite >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings [GOOD] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats >> Worker::Basic [GOOD] >> TKeyValueTest::TestRewriteThenLastValue [GOOD] >> TKeyValueTest::TestRewriteThenLastValueNewApi >> TExportToS3Tests::RebootDuringAbortion [GOOD] >> TExportToS3Tests::SchemaMapping >> TExportToS3Tests::DropSourceTableBeforeTransferring [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring1 |75.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |75.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |75.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... 
results_accumulator.log} |75.2%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Worker::Basic [GOOD] Test command err: 2025-04-17T17:11:57.977730Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331029253608773:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:57.977760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002402/r3tmp/tmpL5N5ii/pdisk_1.dat 2025-04-17T17:12:00.145549Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:00.198380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:12:00.199192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:12:00.245042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:12:00.589019Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29347 TServer::EnableGrpc on GrpcPort 4106, node 1 2025-04-17T17:12:01.712206Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:12:01.712228Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:12:01.712234Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:12:01.712353Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29347 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:12:02.271205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:12:02.459381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744909922581 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) 2025-04-17T17:12:02.631328Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494331050728446028:2426] Handshake: worker# [1:7494331050728446027:2426] 2025-04-17T17:12:02.631388Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494331050728446029:2426] Handshake: worker# [1:7494331050728446027:2426] 2025-04-17T17:12:02.636203Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494331050728446029:2426] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:12:02.671916Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494331050728446029:2426] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 3] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-17T17:12:02.671981Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494331050728446029:2426] Send handshake: worker# [1:7494331050728446027:2426] 2025-04-17T17:12:02.672037Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494331050728446028:2426] Create read session: session# [1:7494331050728446032:2298] 2025-04-17T17:12:02.672122Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7494331050728446027:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-04-17T17:12:02.672139Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7494331050728446027:2426] Handshake with writer: sender# 
[1:7494331050728446029:2426] 2025-04-17T17:12:02.672158Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7494331050728446027:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-04-17T17:12:02.672170Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7494331050728446027:2426] Handshake with reader: sender# [1:7494331050728446028:2426] 2025-04-17T17:12:02.672208Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494331050728446028:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-17T17:12:02.811064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:2, at schemeshard: 72057594046644480 2025-04-17T17:12:02.977987Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331029253608773:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:02.978091Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:12:08.521475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331076498250012:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:08.521612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331076498250011:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:08.521645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331076498250015:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:08.521676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331076498249999:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:08.522944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:08.572792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:2, at schemeshard: 72057594046644480 2025-04-17T17:12:08.620006Z node 1 :TX_PROXY ERROR: Actor# [1:7494331076498250028:2524] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T17:12:08.621717Z node 1 :TX_PROXY ERROR: Actor# [1:7494331076498250027:2523] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T17:12:08.672287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331076498250024:2386], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-17T17:12:08.672339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331076498250025:2387], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-17T17:12:08.672420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331076498250026:2388], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-17T17:12:08.736319Z node 1 :TX_PROXY ERROR: Actor# [1:7494331076498250083:2561] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:12:08.749846Z n ... s it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:12:08.751271Z node 1 :TX_PROXY ERROR: Actor# [1:7494331076498250096:2571] txid# 281474976710666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:12:14.869914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T17:12:15.588541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:12:15.588566Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:17.445182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T17:12:20.843484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T17:12:23.877535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-17T17:12:26.448499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710695:0, at schemeshard: 72057594046644480 2025-04-17T17:12:33.564930Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494331050728446028:2426] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2025-04-17T17:12:33.177000Z MessageGroupId: producer ProducerId: producer }] } } 2025-04-17T17:12:33.576225Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7494331050728446027:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2025-04-17T17:12:33.177000Z MessageGroupId: producer ProducerId: producer }] } 2025-04-17T17:12:33.576326Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494331050728446029:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2025-04-17T17:12:33.177000Z MessageGroupId: producer ProducerId: producer }] } 2025-04-17T17:12:33.576449Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494331050728446029:2426] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 0 BodySize: 36 }] } 
2025-04-17T17:12:33.576557Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7494331183872433505:2426] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-17T17:12:33.576587Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494331050728446029:2426] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-04-17T17:12:33.576646Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7494331183872433505:2426] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-04-17T17:12:33.823659Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7494331183872433505:2426] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-17T17:12:33.823703Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494331050728446029:2426] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-04-17T17:12:33.824081Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494331050728446029:2426] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [0] } 2025-04-17T17:12:33.824458Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7494331050728446027:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-17T17:12:33.824493Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494331050728446028:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-17T17:12:35.240455Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494331050728446028:2426] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2025-04-17T17:12:34.464000Z MessageGroupId: producer ProducerId: producer }] } } 2025-04-17T17:12:35.240509Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7494331050728446027:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2025-04-17T17:12:34.464000Z MessageGroupId: producer ProducerId: producer }] } 2025-04-17T17:12:35.240547Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494331050728446029:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2025-04-17T17:12:34.464000Z MessageGroupId: producer ProducerId: producer }] } 2025-04-17T17:12:35.240631Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494331050728446029:2426] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 }] } 2025-04-17T17:12:35.240729Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7494331183872433505:2426] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 
2025-04-17T17:12:35.523745Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7494331183872433505:2426] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-17T17:12:35.523802Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494331050728446029:2426] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-04-17T17:12:35.523829Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494331050728446029:2426] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-04-17T17:12:35.523866Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7494331050728446027:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-17T17:12:35.523899Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494331050728446028:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-17T17:12:35.912551Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494331050728446028:2426] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2025-04-17T17:12:35.897000Z MessageGroupId: producer ProducerId: producer }] } } 2025-04-17T17:12:35.912607Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7494331050728446027:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2025-04-17T17:12:35.897000Z MessageGroupId: producer ProducerId: producer }] } 2025-04-17T17:12:35.912648Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494331050728446029:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2025-04-17T17:12:35.897000Z MessageGroupId: producer ProducerId: producer }] } 2025-04-17T17:12:35.912718Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494331050728446029:2426] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 36 }] } 2025-04-17T17:12:35.912787Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7494331183872433505:2426] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-04-17T17:12:35.913838Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7494331183872433505:2426] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-17T17:12:35.913879Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494331050728446029:2426] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-04-17T17:12:35.913905Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494331050728446029:2426] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } 2025-04-17T17:12:35.913940Z node 1 :REPLICATION_SERVICE DEBUG: 
[Worker][1:7494331050728446027:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-17T17:12:35.913969Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494331050728446028:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-17T17:12:36.536213Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494331050728446028:2426] Handle NKikimr::NReplication::TEvYdbProxy::TEvTopicReaderGone { Result: { status: UNAVAILABLE, issues: {
: Error: PartitionSessionClosed { Partition session id: 1 Topic: "topic" Partition: 0 Reason: ConnectionLost } } } } 2025-04-17T17:12:36.536233Z node 1 :REPLICATION_SERVICE INFO: [RemoteTopicReader][/Root/topic][0][1:7494331050728446028:2426] Leave 2025-04-17T17:12:36.536264Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7494331050728446027:2426] Reader has gone: sender# [1:7494331050728446028:2426] 2025-04-17T17:12:36.536298Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494331196757335624:2426] Handshake: worker# [1:7494331050728446027:2426] 2025-04-17T17:12:36.537804Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494331196757335624:2426] Create read session: session# [1:7494331196757335625:2298] 2025-04-17T17:12:36.537834Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7494331050728446027:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-04-17T17:12:36.537843Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7494331050728446027:2426] Handshake with reader: sender# [1:7494331196757335624:2426] 2025-04-17T17:12:36.537866Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494331196757335624:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } >> TestProgram::YqlKernel >> KqpDataIntegrityTrails::BrokenReadLock-UseSink [GOOD] >> TestProgram::YqlKernel [GOOD] >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] >> KqpDataIntegrityTrails::Ddl [GOOD] >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernel [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N3(26):{\"i\":\"3,4\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:3,4"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"3\",\"p\":{\"address\":{\"name\":\"sum\",\"id\":3}},\"o\":\"3\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"4\",\"p\":{\"address\":{\"name\":\"vat\",\"id\":4}},\"o\":\"4\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, 
label="N4(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"sum\",\"id\":3},{\"name\":\"vat\",\"id\":4}]},\"o\":\"3,4\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"3","p":{"address":{"name":"sum","id":3}},"o":"3","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"sum","id":3},{"name":"vat","id":4}]},"o":"3,4","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"4","p":{"address":{"name":"vat","id":4}},"o":"4","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"3,4","p":{"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] >> TColumnEngineTestLogs::IndexWriteLoadRead [GOOD] >> KqpDataIntegrityTrails::BrokenReadLock+UseSink [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring1 [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring2 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 27990, MsgBus: 23354 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001736/r3tmp/tmp7Sf1kH/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27990, node 1 TClient is connected to server localhost:23354 TClient is connected to server localhost:23354 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed >> TestProgram::JsonExistsBinary >> TColumnEngineTestInsertTable::TestInsertCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteLoadRead [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=2; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=1072;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=0;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=2;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=2;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;));); ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] Test command err: 2025-04-17T17:11:24.440921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:11:24.441524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:11:24.441581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00172c/r3tmp/tmpSmNgxY/pdisk_1.dat 2025-04-17T17:11:24.842809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:11:24.892121Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:24.937673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:24.937781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:24.949183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:11:25.043944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:11:25.094643Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:11:25.095665Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:11:25.096230Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:11:25.096498Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:11:25.145511Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:11:25.146265Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:11:25.146383Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:11:25.147855Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:11:25.147930Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:11:25.147975Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:11:25.148451Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:11:25.148591Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:11:25.148661Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:11:25.159798Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:11:25.199729Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:11:25.199903Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:11:25.200005Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:11:25.200048Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:11:25.200080Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:11:25.200134Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:11:25.200350Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:11:25.200397Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:11:25.200646Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:11:25.200732Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:11:25.200823Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:11:25.200861Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:11:25.200894Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:11:25.200926Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:11:25.200960Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:11:25.200984Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:11:25.201022Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:11:25.201123Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:25.201151Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:25.201205Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:11:25.201542Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:11:25.201578Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:11:25.201665Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:11:25.201854Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:11:25.201894Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:11:25.201968Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:11:25.202005Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:11:25.202040Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:11:25.202079Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:11:25.202109Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:11:25.202329Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:11:25.202359Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:11:25.202390Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:11:25.202423Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:11:25.202468Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:11:25.202501Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:11:25.202537Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:11:25.202563Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:11:25.202582Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:11:25.203710Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:11:25.203747Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:11:25.214579Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:11:25.214640Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:11:25.214672Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:11:25.214717Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:11:25.214785Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:11:25.386041Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:25.386097Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:25.386134Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:11:25.386475Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:11:25.386512Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:11:25.386615Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:11:25.386661Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:11:25.386708Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:11:25.386754Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:11:25.398900Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:11:25.398978Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:11:25.399795Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:11:25.399840Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:11:25.399887Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:11:2 ... d: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-17T17:12:45.294273Z node 7 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[7:934:2775], 1001} after executionsCount# 1 2025-04-17T17:12:45.294312Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:934:2775], 1001} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:12:45.294359Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:934:2775], 1001} finished in read 2025-04-17T17:12:45.294404Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-17T17:12:45.294771Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2025-04-17T17:12:45.294798Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2025-04-17T17:12:45.294821Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2025-04-17T17:12:45.294860Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-17T17:12:45.295205Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2025-04-17T17:12:45.295227Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037889 has finished 2025-04-17T17:12:45.295247Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-17T17:12:45.295323Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-17T17:12:45.306545Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:939:2780], Recipient [7:718:2598]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:45.306583Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:45.306610Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [7:938:2779], serverId# [7:939:2780], sessionId# [0:0:0] 2025-04-17T17:12:45.306666Z node 7 :TX_DATASHARD TRACE: StateWork, received 
event# 269553169, Sender [7:937:2778], Recipient [7:718:2598]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-17T17:12:45.307368Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:942:2783], Recipient [7:718:2598]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:45.307400Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:45.307426Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [7:941:2782], serverId# [7:942:2783], sessionId# [0:0:0] 2025-04-17T17:12:45.307575Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:940:2781], Recipient [7:718:2598]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-17T17:12:45.307684Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-04-17T17:12:45.307716Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1004/1000004 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:12:45.307768Z node 7 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to non-repeatable v1004/18446744073709551615 2025-04-17T17:12:45.307813Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit CheckRead 2025-04-17T17:12:45.307870Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-04-17T17:12:45.307899Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit CheckRead 2025-04-17T17:12:45.307921Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-04-17T17:12:45.307942Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit BuildAndWaitDependencies 2025-04-17T17:12:45.307984Z node 7 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037890 2025-04-17T17:12:45.308010Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-04-17T17:12:45.308029Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-04-17T17:12:45.308047Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit ExecuteRead 2025-04-17T17:12:45.308071Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit ExecuteRead 2025-04-17T17:12:45.308161Z node 7 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-17T17:12:45.308279Z node 7 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[7:940:2781], 1002} after executionsCount# 1 2025-04-17T17:12:45.308308Z node 7 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[7:940:2781], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:12:45.308347Z node 7 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[7:940:2781], 1002} finished in read 2025-04-17T17:12:45.308380Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-04-17T17:12:45.308397Z 
node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit ExecuteRead 2025-04-17T17:12:45.308413Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit CompletedOperations 2025-04-17T17:12:45.308430Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit CompletedOperations 2025-04-17T17:12:45.308465Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-04-17T17:12:45.308487Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit CompletedOperations 2025-04-17T17:12:45.308507Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037890 has finished 2025-04-17T17:12:45.308528Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-04-17T17:12:45.308587Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-04-17T17:12:45.328982Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:945:2786], Recipient [7:716:2596]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:45.329028Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:45.329057Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [7:944:2785], serverId# [7:945:2786], sessionId# [0:0:0] 2025-04-17T17:12:45.329108Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [7:943:2784], Recipient [7:716:2596]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-17T17:12:45.342991Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:948:2789], Recipient [7:716:2596]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:45.343029Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:45.343063Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [7:947:2788], serverId# [7:948:2789], sessionId# [0:0:0] 2025-04-17T17:12:45.343524Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:946:2787], Recipient [7:716:2596]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-17T17:12:45.343893Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-04-17T17:12:45.343921Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1004/1000004 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:12:45.343944Z node 7 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v1004/18446744073709551615 2025-04-17T17:12:45.352291Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit CheckRead 2025-04-17T17:12:45.352374Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-04-17T17:12:45.352404Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit CheckRead 2025-04-17T17:12:45.352431Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-04-17T17:12:45.352456Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit 
BuildAndWaitDependencies 2025-04-17T17:12:45.352503Z node 7 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037891 2025-04-17T17:12:45.352534Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-04-17T17:12:45.352556Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-04-17T17:12:45.352578Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit ExecuteRead 2025-04-17T17:12:45.352904Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit ExecuteRead 2025-04-17T17:12:45.352976Z node 7 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-17T17:12:45.353733Z node 7 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[7:946:2787], 1003} after executionsCount# 1 2025-04-17T17:12:45.353774Z node 7 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[7:946:2787], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:12:45.354118Z node 7 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[7:946:2787], 1003} finished in read 2025-04-17T17:12:45.354162Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-04-17T17:12:45.354184Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit ExecuteRead 2025-04-17T17:12:45.354204Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit CompletedOperations 2025-04-17T17:12:45.354225Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit CompletedOperations 2025-04-17T17:12:45.354262Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-04-17T17:12:45.354279Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit CompletedOperations 2025-04-17T17:12:45.354582Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037891 has finished 2025-04-17T17:12:45.354607Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-04-17T17:12:45.354671Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 >> TestProgram::JsonExistsBinary [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl [GOOD] Test command err: Trying to start YDB, gRPC: 18814, MsgBus: 10135 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00171f/r3tmp/tmpF3SpWM/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18814, node 1 TClient is connected to server localhost:10135 TClient is connected to server localhost:10135 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> TExportToS3Tests::SchemaMapping [GOOD] >> TExportToS3Tests::SchemaMappingEncryption |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 6244, MsgBus: 24963 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00171c/r3tmp/tmp9NPtWW/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6244, node 1 TClient is connected to server localhost:24963 TClient is connected to server localhost:24963 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... |75.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |75.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |75.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1266, MsgBus: 14260 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001731/r3tmp/tmpSU3wUd/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1266, node 1 TClient is connected to server localhost:14260 TClient is connected to server localhost:14260 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> GroupWriteTest::Simple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestInsertTable::TestInsertCommit [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=insert_table.cpp:43;event=commit_insertion;path_id=0;blob_range={ Blob: DS:0:[2222:1:1:2:100:1:0] Offset: 0 Size: 0 }; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; >> SystemView::PartitionStatsOneSchemeShard [GOOD] >> SystemView::PartitionStatsOneSchemeShardDataQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExistsBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"6,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"6,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 27870, MsgBus: 3849 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00171a/r3tmp/tmpuFjurW/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27870, node 1 TClient is connected to server localhost:3849 TClient is connected to server localhost:3849 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
>> SystemView::TopPartitionsByCpuFields [GOOD] >> SystemView::TopPartitionsByCpuRanges >> TReplicationTests::CreateSequential >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport |75.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |75.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |75.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::Simple [GOOD] Test command err: RandomSeed# 11658293565975981188 2025-04-17T17:11:30.642155Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 1 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-17T17:11:30.670037Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-17T17:11:30.670110Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 going to send TEvBlock {TabletId# 1 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-17T17:11:30.672776Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-04-17T17:11:30.686687Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-17T17:11:30.689673Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-17T17:12:48.505170Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-17T17:12:48.505271Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-17T17:12:48.505323Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-17T17:12:48.505365Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-17T17:12:48.595548Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} 2025-04-17T17:12:48.595645Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Status# OK} >> TExportToS3Tests::DropCopiesBeforeTransferring2 [GOOD] >> TExportToS3Tests::EnableChecksumsPersistance >> ResultFormatter::Primitive [GOOD] >> 
ResultFormatter::Struct [GOOD] >> ResultFormatter::EmptyDict [GOOD] >> ResultFormatter::Dict [GOOD] >> ResultFormatter::Decimal [GOOD] >> ResultFormatter::EmptyResultSet [GOOD] >> ResultFormatter::EmptyList [GOOD] >> ResultFormatter::EmptyTuple [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Void [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuidTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Struct [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Tuple [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Variant [GOOD] >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] >> TGRpcCmsTest::DescribeOptionsTest |75.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |75.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |75.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator >> ResultFormatter::Tuple [GOOD] >> ResultFormatter::Tagged [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::SimpleType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDate [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Optional [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::List [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Struct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Dict [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] Test command err: 2025-04-17T17:11:22.389084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:11:22.392562Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:11:22.392645Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00170d/r3tmp/tmpx6GJH4/pdisk_1.dat 2025-04-17T17:11:25.239451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:11:25.306493Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:25.362289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:25.362424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:25.377533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:11:25.466407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:11:25.506815Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:11:25.507701Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:11:25.508053Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:11:25.509712Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:11:25.590436Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:11:25.606825Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:11:25.607515Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:11:25.631443Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:11:25.631547Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:11:25.631610Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:11:25.632001Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:11:25.632182Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:11:25.632277Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:11:25.647616Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:11:25.957817Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:11:25.958954Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:11:25.963434Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:11:25.963725Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:11:25.964001Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:11:25.964535Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:11:25.966903Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:11:25.967177Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:11:25.977115Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:11:25.977638Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:11:25.979520Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:11:25.984162Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:11:25.984941Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:11:25.985201Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:11:25.985478Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:11:25.985725Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:11:25.986047Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:11:25.987656Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:25.987960Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:25.996318Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:11:25.999571Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:11:26.000659Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:11:26.001022Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:11:26.002869Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:11:26.003819Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:11:26.008300Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:11:26.008360Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:11:26.008396Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:11:26.008424Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:11:26.008450Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:11:26.016556Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:11:26.016626Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:11:26.016663Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:11:26.016698Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:11:26.016748Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:11:26.016777Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:11:26.016810Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:11:26.016839Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:11:26.016875Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:11:26.018249Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:11:26.018293Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:11:26.029045Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:11:26.029096Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:11:26.029130Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:11:26.029169Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:11:26.029233Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:11:26.239850Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:26.252482Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:26.252781Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:11:26.261908Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:11:26.262217Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:11:26.263332Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:11:26.263648Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:11:26.268174Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:11:26.268456Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:11:26.354705Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:11:26.355041Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:11:26.391057Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:11:26.391357Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:11:26.391659Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:11:2 ... 75186224037888 is Executed 2025-04-17T17:12:48.591865Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:12:48.591916Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:12:48.592121Z node 7 :TX_DATASHARD DEBUG: Planned transaction txId 1234567890011 at step 3500 at tablet 72075186224037888 { Transactions { TxId: 1234567890011 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:12:48.592175Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:12:48.592559Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [7:810:2667], Recipient [7:810:2667]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:12:48.592605Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:12:48.592684Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:12:48.592745Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:12:48.592796Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:12:48.592851Z node 7 :TX_DATASHARD DEBUG: Found ready operation [3500:1234567890011] in PlanQueue unit at 72075186224037888 2025-04-17T17:12:48.592901Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PlanQueue 2025-04-17T17:12:48.592955Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-17T17:12:48.593019Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PlanQueue 2025-04-17T17:12:48.593069Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadWriteDetails 2025-04-17T17:12:48.593119Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadTxDetails 2025-04-17T17:12:48.605647Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 1234567890011 at 
72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2025-04-17T17:12:48.605826Z node 7 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2025-04-17T17:12:48.605911Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2025-04-17T17:12:48.606029Z node 7 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 loaded writeOp from db 3500:1234567890011 keys extracted: 1 2025-04-17T17:12:48.606096Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-17T17:12:48.606137Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadWriteDetails 2025-04-17T17:12:48.606171Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:12:48.606209Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:12:48.606289Z node 7 :TX_DATASHARD TRACE: Operation [3500:1234567890011] is the new logically complete end at 72075186224037888 2025-04-17T17:12:48.606345Z node 7 :TX_DATASHARD TRACE: Operation [3500:1234567890011] is the new logically incomplete end at 72075186224037888 2025-04-17T17:12:48.606396Z node 7 :TX_DATASHARD TRACE: Activated operation [3500:1234567890011] at 72075186224037888 2025-04-17T17:12:48.606468Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-17T17:12:48.606537Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:12:48.606564Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildWriteOutRS 2025-04-17T17:12:48.606592Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BuildWriteOutRS 2025-04-17T17:12:48.606640Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-17T17:12:48.606664Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildWriteOutRS 2025-04-17T17:12:48.606686Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2025-04-17T17:12:48.606710Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit StoreAndSendWriteOutRS 2025-04-17T17:12:48.606735Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-17T17:12:48.606761Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2025-04-17T17:12:48.606784Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit PrepareWriteTxInRS 2025-04-17T17:12:48.606809Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PrepareWriteTxInRS 2025-04-17T17:12:48.606837Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-17T17:12:48.606860Z node 7 
:TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PrepareWriteTxInRS 2025-04-17T17:12:48.606883Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadAndWaitInRS 2025-04-17T17:12:48.606903Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadAndWaitInRS 2025-04-17T17:12:48.606934Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-17T17:12:48.606959Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadAndWaitInRS 2025-04-17T17:12:48.606982Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit ExecuteWrite 2025-04-17T17:12:48.607006Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2025-04-17T17:12:48.607048Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [3500:1234567890011] at 72075186224037888 2025-04-17T17:12:48.607469Z node 7 :TX_DATASHARD TRACE: Tablet 72075186224037888 is not ready for [3500:1234567890011] execution 2025-04-17T17:12:48.607579Z node 7 :TX_DATASHARD DEBUG: tx 1234567890011 at 72075186224037888 released its data 2025-04-17T17:12:48.607646Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Restart 2025-04-17T17:12:48.607684Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T17:12:48.607736Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:12:48.607788Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:12:48.607841Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:12:48.608377Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:12:48.608439Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2025-04-17T17:12:48.608488Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [3500:1234567890011] at 72075186224037888 2025-04-17T17:12:48.608834Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 1234567890011 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2025-04-17T17:12:48.608929Z node 7 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2025-04-17T17:12:48.608997Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2025-04-17T17:12:48.609093Z node 7 :TX_DATASHARD DEBUG: tx 1234567890011 at 72075186224037888 restored its data 2025-04-17T17:12:48.609267Z node 7 :TX_DATASHARD DEBUG: Executed write operation for [3500:1234567890011] at 72075186224037888, row count=1 2025-04-17T17:12:48.609334Z node 7 :TX_DATASHARD TRACE: Lock 1234567890001 marked broken at v{min} 2025-04-17T17:12:48.609430Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-17T17:12:48.609509Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:12:48.609567Z node 7 
:TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit ExecuteWrite 2025-04-17T17:12:48.609612Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit CompleteWrite 2025-04-17T17:12:48.609655Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2025-04-17T17:12:48.609893Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is DelayComplete 2025-04-17T17:12:48.609932Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompleteWrite 2025-04-17T17:12:48.609978Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:12:48.610016Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:12:48.610055Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-17T17:12:48.610084Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:12:48.610131Z node 7 :TX_DATASHARD TRACE: Execution plan for [3500:1234567890011] at 72075186224037888 has finished 2025-04-17T17:12:48.610182Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:12:48.610224Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:12:48.610268Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:12:48.610310Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:12:48.610962Z node 7 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-04-17T17:12:48.611546Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:12:48.611602Z node 7 :TX_DATASHARD TRACE: Complete execution for [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2025-04-17T17:12:48.611675Z node 7 :TX_DATASHARD DEBUG: Complete write [3500 : 1234567890011] from 72075186224037888 at tablet 72075186224037888 send result to client [7:803:2661] 2025-04-17T17:12:48.611727Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> ResultFormatter::FormatEmptySchema [GOOD] >> ResultFormatter::FormatNonEmptySchema [GOOD] |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::EmptyTuple [GOOD] |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Struct [GOOD] |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Decimal [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleBool [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleBoolTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimal [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimalTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalString [GOOD] >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccess [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccessPg >> 
CellsFromTupleTest::CellsFromTupleSuccessPg [GOOD] >> CellsFromTupleTest::CellsFromTupleFails [GOOD] >> CellsFromTupleTest::CellsFromTupleFailsPg [GOOD] >> CompressionTests::Zstd [GOOD] >> CompressionTests::Unsupported [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Tagged [GOOD] |75.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |75.4%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |75.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] >> TExportToS3Tests::SchemaMappingEncryption [GOOD] >> TReplicationTests::CreateSequential [GOOD] >> TReplicationTests::CreateInParallel |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::FormatNonEmptySchema [GOOD] |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] |75.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |75.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |75.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup >> ConvertMiniKQLValueToYdbValueTest::SimpleInt32 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt64 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDate [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDateTime [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzTimeStamp [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleDecimal [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] >> SystemView::AuthUsers [GOOD] >> SystemView::AuthUsers_LockUnlock |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] |75.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |75.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |75.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] >> DataShardWrite::DelayedVolatileTxAndEvWrite [GOOD] >> TExportToS3Tests::EnableChecksumsPersistance [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleBool [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalString [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::List [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDate [GOOD] >> 
ConvertYdbValueToMiniKQLValueTest::SimpleTzDateTime [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzTimeStamp [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32TypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] >> SystemView::ConcurrentScans [GOOD] >> SystemView::GroupsFields >> ConvertYdbPermissionNameToACLAttrs::TestEqualGranularAndDeprecatedAcl [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::List [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Void [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Struct [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Tuple [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Variant [GOOD] >> ConvertTableDescription::StorageSettings >> ConvertTableDescription::StorageSettings [GOOD] >> ConvertTableDescription::ColumnFamilies [GOOD] >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::SchemaMappingEncryption [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:12:15.305106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:12:15.305688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:12:15.305989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:12:15.306806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:12:15.307728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:12:15.308010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:12:15.308784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:12:15.309910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:12:15.312645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:12:16.728343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:12:16.728884Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:16.918489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:12:16.927543Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:12:16.928622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:12:17.070333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:12:17.078151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:12:17.101285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:17.125033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:12:17.214793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:17.272463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:17.272862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:17.273259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:12:17.273884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:17.274169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:12:17.280304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:12:17.339616Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:12:19.820522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:12:19.822889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:19.824802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:12:19.827013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:12:19.827694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:19.865918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:19.868479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:12:19.870097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:19.871047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:12:19.871695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:12:19.872086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:12:19.898010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:19.898081Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:12:19.898970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:12:19.921366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:19.921701Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:19.922011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:19.922300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:12:19.976454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:12:20.007136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:12:20.009605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:12:20.019080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:20.019216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:12:20.019254Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:20.021187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:12:20.021476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:20.023772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:12:20.028835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:12:20.034214Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:20.034269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:20.034520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:20.034568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:12:20.034927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:20.034974Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:12:20.035056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:12:20.035089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:20.035121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:12:20.035146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:20.035190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:12:20.035229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:20.035258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:12:20.035289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:12:20.035355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:12:20.035388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:12:20.035808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:12:20.071682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:12:20.081015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:12:20.081694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
046678944, LocalPathId: 4], 18446744073709551615 2025-04-17T17:12:50.684507Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-17T17:12:50.685730Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:50.685777Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:50.685902Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-17T17:12:50.685999Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:50.686037Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-04-17T17:12:50.686070Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-04-17T17:12:50.686777Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:12:50.686902Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:12:50.686945Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-17T17:12:50.686990Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-04-17T17:12:50.687058Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T17:12:50.687440Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:12:50.687514Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:12:50.687543Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-17T17:12:50.687571Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-17T17:12:50.687601Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T17:12:50.687677Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at 
schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-04-17T17:12:50.687722Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:123:2149] 2025-04-17T17:12:50.690218Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-17T17:12:50.690507Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-17T17:12:50.690569Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-04-17T17:12:50.690612Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-04-17T17:12:50.690650Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:12:50.690675Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-04-17T17:12:50.690706Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-04-17T17:12:50.692750Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:12:50.692838Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:12:50.692890Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:506:2464] TestWaitNotification: OK eventTxId 103 2025-04-17T17:12:50.694328Z node 4 :EXPORT DEBUG: TExport::TTxForget, dropping export tables, info: { Id: 103 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 4] UserSID: '(empty maybe)' PeerName: '' State: Done WaitTxId: 281474976710763 Issue: '' Items: 2 PendingItems: 0 PendingDropItems: 0 } 2025-04-17T17:12:50.696462Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:12:50.696512Z node 4 :EXPORT DEBUG: TExport::TTxProgress: Resume: id# 103 2025-04-17T17:12:50.696592Z node 4 :EXPORT INFO: TExport::TTxProgress: Allocate txId: info# { Id: 103 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 4] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 2 PendingItems: 0 PendingDropItems: 0 } 2025-04-17T17:12:50.696657Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:12:50.696754Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 103, at schemeshard: 72057594046678944 2025-04-17T17:12:50.696797Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:12:50.696833Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnAllocateResult: txId# 281474976710764, id# 103 2025-04-17T17:12:50.696908Z node 4 :EXPORT INFO: TExport::TTxProgress: Drop propose: info# { Id: 103 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 4] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 2 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976710764 2025-04-17T17:12:50.697014Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:12:50.699416Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "export-103" } Internal: true } TxId: 
281474976710764 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:12:50.699563Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/export-103, pathId: 0, opId: 281474976710764:0, at schemeshard: 72057594046678944 2025-04-17T17:12:50.699721Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710764:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-103', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 4], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-17T17:12:50.701987Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710764, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-103\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 4], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710763" TxId: 281474976710764 SchemeshardId: 72057594046678944 PathId: 4 PathDropTxId: 281474976710763, at schemeshard: 72057594046678944 2025-04-17T17:12:50.702150Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710764, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-103', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 4], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710763, operation: DROP DIRECTORY, path: /MyRoot/export-103 2025-04-17T17:12:50.702306Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710764, status# StatusPathDoesNotExist 2025-04-17T17:12:50.702426Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-103\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 4], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710763" TxId: 281474976710764 SchemeshardId: 72057594046678944 PathId: 4 PathDropTxId: 281474976710763 2025-04-17T17:12:50.702502Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:12:50.702542Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnModifyResult: txId# 281474976710764, status# StatusPathDoesNotExist 2025-04-17T17:12:50.702629Z node 4 :EXPORT TRACE: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-103\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 4], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710763" TxId: 281474976710764 SchemeshardId: 72057594046678944 PathId: 4 PathDropTxId: 281474976710763 2025-04-17T17:12:50.702758Z node 4 :EXPORT INFO: TExport::TTxProgress: Wait for completion: info# { Id: 103 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 4] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 281474976710763 Issue: '' Items: 2 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710763 2025-04-17T17:12:50.704641Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:12:50.704788Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-17T17:12:50.704897Z node 4 
:FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-04-17T17:12:50.704962Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-04-17T17:12:50.705012Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:12:50.705047Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-04-17T17:12:50.705105Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-04-17T17:12:50.707088Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 103 2025-04-17T17:12:50.707406Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T17:12:50.707480Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T17:12:50.708013Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T17:12:50.708129Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:12:50.708176Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:962:2891] TestWaitNotification: OK eventTxId 103 |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] >> TContinuousBackupTests::TakeIncrementalBackup >> TContinuousBackupTests::Basic |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDateTime [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzTimeStamp [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::UuidType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantTuple [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantStruct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Void [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] >> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DelayedVolatileTxAndEvWrite [GOOD] Test command err: 2025-04-17T17:11:25.277163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:11:25.277643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:11:25.277698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0016f1/r3tmp/tmpeKSsEC/pdisk_1.dat 2025-04-17T17:11:27.706208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:11:27.785407Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:27.831135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:27.831250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:27.846838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:11:27.977524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:11:28.117424Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:11:28.126828Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:11:28.129710Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:11:28.132419Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:11:28.296023Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:11:28.296986Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:11:28.297122Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:11:28.298999Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:11:28.299086Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:11:28.299162Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:11:28.299594Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:11:28.299735Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:11:28.299818Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:11:28.310860Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:11:28.383309Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:11:28.383500Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:11:28.383629Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:11:28.383668Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:11:28.383716Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:11:28.383769Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:11:28.384014Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:11:28.384068Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:11:28.385374Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:11:28.385489Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:11:28.385642Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:11:28.385727Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:11:28.385767Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:11:28.385805Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:11:28.385839Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:11:28.385874Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:11:28.385922Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:11:28.386056Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:28.386097Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:28.386136Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:11:28.386553Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:11:28.386605Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:11:28.386713Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:11:28.386971Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:11:28.387025Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:11:28.387112Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:11:28.387155Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:11:28.387198Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:11:28.387233Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:11:28.387266Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:11:28.387574Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:11:28.387634Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:11:28.387674Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:11:28.387712Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:11:28.387760Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:11:28.387796Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:11:28.387827Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:11:28.387858Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:11:28.387884Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:11:28.389448Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:11:28.389497Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:11:28.404727Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:11:28.404811Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:11:28.404849Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:11:28.404913Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:11:28.405003Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:11:28.566038Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:28.566090Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:11:28.566141Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:11:28.566467Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:11:28.566501Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:11:28.566613Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:11:28.566655Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:11:28.566692Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:11:28.566725Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:11:28.575051Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:11:28.575125Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:11:28.575988Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:11:28.576035Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:11:28.576081Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:11:2 ... p# 1524 txid# 281474976715660 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-17T17:12:50.893374Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-17T17:12:50.893400Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715660 2025-04-17T17:12:50.893474Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 1524 txid# 281474976715660 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-17T17:12:50.893575Z node 7 :TX_DATASHARD DEBUG: Complete volatile write [1524 : 281474976715660] from 72075186224037889 at tablet 72075186224037889 send result to client [7:917:2664] 2025-04-17T17:12:50.894203Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:12:50.894389Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:12:50.894914Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:12:50.895178Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:12:50.898110Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-17T17:12:50.898260Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:698:2586], Recipient [7:700:2588]: {TEvReadSet step# 1524 txid# 281474976715660 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-17T17:12:50.898307Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:12:50.898367Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715660 2025-04-17T17:12:50.898773Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-17T17:12:50.899012Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender 
[7:700:2588], Recipient [7:698:2586]: {TEvReadSet step# 1524 txid# 281474976715660 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-04-17T17:12:50.899045Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:12:50.899071Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715660 ... validating table 2025-04-17T17:12:51.317052Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2btbf5a3kz6k6hvva6rwvj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MmM4Y2VlYmMtOTg5MThhN2UtMjQ1YjIzYTctYzNmNzEwNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:12:51.320411Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:970:2783], Recipient [7:698:2586]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1524 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-17T17:12:51.320647Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:12:51.320751Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-04-17T17:12:51.320892Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T17:12:51.320947Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-04-17T17:12:51.320989Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:12:51.321030Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:12:51.321079Z node 7 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-04-17T17:12:51.321135Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T17:12:51.321159Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:12:51.321183Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T17:12:51.321205Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:12:51.321328Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1524 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-17T17:12:51.321611Z node 7 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v1524/18446744073709551615 2025-04-17T17:12:51.321672Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[7:970:2783], 0} after executionsCount# 1 2025-04-17T17:12:51.321742Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:970:2783], 0} sends rowCount# 1, bytes# 64, quota rows left# 1000, quota bytes left# 5242816, 
hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:12:51.321829Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:970:2783], 0} finished in read 2025-04-17T17:12:51.321900Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T17:12:51.321924Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:12:51.321946Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:12:51.321969Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:12:51.322011Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T17:12:51.322032Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:12:51.322060Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-04-17T17:12:51.322110Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T17:12:51.322235Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-17T17:12:51.322905Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [7:970:2783], Recipient [7:698:2586]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T17:12:51.322969Z node 7 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-17T17:12:51.323494Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:970:2783], Recipient [7:700:2588]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1524 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 RangesSize: 1 2025-04-17T17:12:51.323618Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-17T17:12:51.323669Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CheckRead 2025-04-17T17:12:51.323717Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-17T17:12:51.323742Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CheckRead 2025-04-17T17:12:51.323767Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-17T17:12:51.323791Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-17T17:12:51.323830Z node 7 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037889 2025-04-17T17:12:51.323857Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-17T17:12:51.323889Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-17T17:12:51.323914Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit ExecuteRead 2025-04-17T17:12:51.323936Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit ExecuteRead 2025-04-17T17:12:51.324016Z node 7 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { 
OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1524 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2025-04-17T17:12:51.324597Z node 7 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v1524/18446744073709551615 2025-04-17T17:12:51.324641Z node 7 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[7:970:2783], 1} after executionsCount# 1 2025-04-17T17:12:51.324675Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:970:2783], 1} sends rowCount# 1, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:12:51.324753Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:970:2783], 1} finished in read 2025-04-17T17:12:51.324797Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-17T17:12:51.324821Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit ExecuteRead 2025-04-17T17:12:51.324843Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit CompletedOperations 2025-04-17T17:12:51.324865Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CompletedOperations 2025-04-17T17:12:51.324932Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-17T17:12:51.324955Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CompletedOperations 2025-04-17T17:12:51.324977Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037889 has finished 2025-04-17T17:12:51.325002Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-17T17:12:51.325083Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-17T17:12:51.325679Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [7:970:2783], Recipient [7:700:2588]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-04-17T17:12:51.325721Z node 7 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 2 } items { int32_value: 3 } items { int32_value: 4 } }, { items { int32_value: 11 } items { int32_value: 12 } items { int32_value: 12 } items { int32_value: 12 } } >> DataShardReadIterator::ShouldReceiveErrorAfterSplit >> SystemView::TopPartitionsByCpuTables [GOOD] >> SystemView::TopPartitionsByTliFields ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::EnableChecksumsPersistance [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:12:27.463099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:12:27.463176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:12:27.463211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:12:27.463251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:12:27.463302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:12:27.463331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:12:27.463379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:12:27.463450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:12:27.463748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:12:28.343522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:12:28.343581Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:28.395430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:12:28.398392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:12:28.399365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:12:28.448064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:12:28.448227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:12:28.448719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:28.448896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:12:28.451195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:28.452194Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:28.452251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:28.452437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:12:28.452476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:28.452508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:12:28.452619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:12:28.464913Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:12:29.141505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { 
Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:12:29.143246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:29.152944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:12:29.155962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:12:29.156338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:29.179075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:29.181475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:12:29.183066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:29.183126Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:12:29.183157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:12:29.183181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:12:29.199810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:29.200159Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:12:29.200460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:12:29.223415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:29.223459Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:29.224042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:29.225526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:12:29.280521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:12:29.303213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:12:29.309137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: 
minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:12:29.353650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:29.364511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:12:29.364834Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:29.367712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:12:29.368052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:29.391198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:12:29.391848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:12:29.447866Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:29.448222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:29.472914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:29.474025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:12:29.491994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:29.492508Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:12:29.493252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:12:29.494040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:29.494344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:12:29.494908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:29.495190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:12:29.495489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:29.495802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:12:29.496064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:12:29.508548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:12:29.508881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:12:29.509144Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:12:29.564936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:12:29.570636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:12:29.570960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T17:12:51.481791Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720758, ready parts: 0/1, is published: true 2025-04-17T17:12:51.500530Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720758, at schemeshard: 72057594046678944 2025-04-17T17:12:51.500594Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720758, ready parts: 0/1, is published: true 2025-04-17T17:12:51.500636Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720758, at schemeshard: 72057594046678944 2025-04-17T17:12:51.501777Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976720758 msg type: 269090816 2025-04-17T17:12:51.501896Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720758, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:12:51.502028Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720758 FAKE_COORDINATOR: Add transaction: 281474976720758 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976720758 at step: 5000007 2025-04-17T17:12:51.508528Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:51.508693Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720758 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 17179871339 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:12:51.508760Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976720758:0, step: 5000007, at schemeshard: 72057594046678944 2025-04-17T17:12:51.508926Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976720758:0, at schemeshard: 72057594046678944 2025-04-17T17:12:51.509002Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720758:0 progress is 1/1 2025-04-17T17:12:51.509047Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-04-17T17:12:51.509098Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720758:0 progress is 1/1 2025-04-17T17:12:51.509137Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-04-17T17:12:51.509201Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:12:51.509272Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:12:51.509323Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720758, ready parts: 1/1, is published: false 2025-04-17T17:12:51.509391Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-04-17T17:12:51.509439Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720758:0 2025-04-17T17:12:51.509480Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720758:0 2025-04-17T17:12:51.509565Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T17:12:51.509611Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720758, publications: 2, subscribers: 1 2025-04-17T17:12:51.509650Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720758, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-17T17:12:51.509702Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720758, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-04-17T17:12:51.510638Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720758 2025-04-17T17:12:51.521209Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:51.521308Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:51.521481Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720758, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:12:51.521625Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:51.521667Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:607:2551], at schemeshard: 72057594046678944, txId: 281474976720758, path id: 1 2025-04-17T17:12:51.521706Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:607:2551], at schemeshard: 72057594046678944, txId: 281474976720758, path id: 3 FAKE_COORDINATOR: Erasing txId 281474976720758 2025-04-17T17:12:51.522437Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976720758 2025-04-17T17:12:51.522559Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976720758 2025-04-17T17:12:51.522619Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720758 2025-04-17T17:12:51.522671Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-17T17:12:51.522725Z 
node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:12:51.523336Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720758 2025-04-17T17:12:51.523413Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720758 2025-04-17T17:12:51.523440Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720758 2025-04-17T17:12:51.523468Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720758, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-17T17:12:51.523497Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:12:51.523570Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720758, subscribers: 1 2025-04-17T17:12:51.523623Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:557:2512] 2025-04-17T17:12:51.541855Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720758 2025-04-17T17:12:51.549075Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720758 2025-04-17T17:12:51.549251Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976720758 2025-04-17T17:12:51.549325Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976720758 2025-04-17T17:12:51.584920Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 442 RawX2: 17179871595 } TabletId: 72075186233409547 State: 4 2025-04-17T17:12:51.585047Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-04-17T17:12:51.597488Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-17T17:12:51.598098Z node 4 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2025-04-17T17:12:51.598326Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T17:12:51.598660Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-17T17:12:51.609272Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:12:51.609348Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-17T17:12:51.609438Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T17:12:51.609490Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T17:12:51.609523Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:12:51.621120Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T17:12:51.621265Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T17:12:51.621521Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2025-04-17T17:12:51.632231Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T17:12:51.632316Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T17:12:51.632927Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion export in-flight, txId: 102, at schemeshard: 72057594046678944 2025-04-17T17:12:51.632984Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion, export is ready to notify, txId: 102, at schemeshard: 72057594046678944 2025-04-17T17:12:51.633109Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:12:51.633159Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:760:2699] TestWaitNotification: OK eventTxId 102 |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:12:20.036318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:12:20.036405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:12:20.036466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:12:20.036506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:12:20.037067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:12:20.037096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: 
type TxSplitTablePartition, limit 10000 2025-04-17T17:12:20.037154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:12:20.037229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:12:20.037527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:12:20.627596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:12:20.627657Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:20.785084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:12:20.787947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:12:20.797324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:12:20.918296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:12:20.924913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:12:20.957574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:20.957988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:12:21.015347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:21.032514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:21.032936Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:21.033984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:12:21.035011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:21.035329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:12:21.037046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:12:21.090653Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:12:23.250134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:12:23.250328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:23.250498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:12:23.250673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose 
status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:12:23.250722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:23.252478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:23.252579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:12:23.252712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:23.252755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:12:23.252787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:12:23.252816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:12:23.254059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:23.254100Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:12:23.254130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:12:23.255196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:23.255247Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:23.255289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:23.255332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:12:23.258052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:12:23.259208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:12:23.261362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:12:23.271845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:23.273455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-04-17T17:12:23.274080Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:23.277131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:12:23.277193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:23.279487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:12:23.280748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:12:23.299282Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:23.300953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:23.302311Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:23.303016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:12:23.306325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:23.306951Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:12:23.308322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:12:23.308650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:23.309023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:12:23.309362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:23.310012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:12:23.310655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:23.311412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:12:23.311759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:12:23.312166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:12:23.313093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:12:23.313434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:12:23.340528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:12:23.341638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2025-04-17T17:12:23.341986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710769, path id: 1 2025-04-17T17:12:51.951558Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710769, path id: 6 2025-04-17T17:12:51.952414Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710769:0, at schemeshard: 72057594046678944 2025-04-17T17:12:51.952465Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710769:0, at schemeshard: 72057594046678944 2025-04-17T17:12:51.952538Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710769 ready parts: 1/1 2025-04-17T17:12:51.952696Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710769 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:12:51.953342Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-04-17T17:12:51.953428Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-04-17T17:12:51.953454Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710769 2025-04-17T17:12:51.953495Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710769, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 18 2025-04-17T17:12:51.953529Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T17:12:51.954339Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-04-17T17:12:51.954416Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-04-17T17:12:51.954439Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710769 2025-04-17T17:12:51.954465Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710769, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 7 2025-04-17T17:12:51.954490Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-17T17:12:51.954559Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710769, ready parts: 0/1, is published: true 2025-04-17T17:12:51.957658Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710769, at schemeshard: 
72057594046678944 2025-04-17T17:12:51.957703Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710769, ready parts: 0/1, is published: true 2025-04-17T17:12:51.957736Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710769, at schemeshard: 72057594046678944 2025-04-17T17:12:51.958390Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710769:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710769 msg type: 269090816 2025-04-17T17:12:51.958573Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710769, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:12:51.958967Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710769 FAKE_COORDINATOR: Add transaction: 281474976710769 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710769 at step: 5000014 2025-04-17T17:12:51.960048Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000014, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:51.960182Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710769 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 17179871339 } } Step: 5000014 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:12:51.960233Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710769:0, step: 5000014, at schemeshard: 72057594046678944 2025-04-17T17:12:51.960391Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710769:0, at schemeshard: 72057594046678944 2025-04-17T17:12:51.960456Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710769:0 progress is 1/1 2025-04-17T17:12:51.960495Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710769 ready parts: 1/1 2025-04-17T17:12:51.960543Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710769:0 progress is 1/1 2025-04-17T17:12:51.960582Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710769 ready parts: 1/1 2025-04-17T17:12:51.960634Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:12:51.960691Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-04-17T17:12:51.960731Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710769, ready parts: 1/1, is published: false 2025-04-17T17:12:51.960777Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710769 ready parts: 1/1 2025-04-17T17:12:51.960815Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710769:0 2025-04-17T17:12:51.960870Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710769:0 2025-04-17T17:12:51.960977Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-17T17:12:51.961024Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 
281474976710769, publications: 2, subscribers: 1 2025-04-17T17:12:51.961066Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710769, [OwnerId: 72057594046678944, LocalPathId: 1], 19 2025-04-17T17:12:51.961100Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710769, [OwnerId: 72057594046678944, LocalPathId: 6], 18446744073709551615 2025-04-17T17:12:51.961921Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710769 2025-04-17T17:12:51.966391Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:51.966449Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710769, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:51.966584Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710769, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-04-17T17:12:51.966676Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:51.966700Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710769, path id: 1 2025-04-17T17:12:51.966724Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710769, path id: 6 FAKE_COORDINATOR: Erasing txId 281474976710769 2025-04-17T17:12:51.967428Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 19 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-04-17T17:12:51.967515Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 19 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-04-17T17:12:51.967558Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710769 2025-04-17T17:12:51.967608Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710769, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 19 2025-04-17T17:12:51.967664Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T17:12:51.968153Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-04-17T17:12:51.968227Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-04-17T17:12:51.968251Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710769 2025-04-17T17:12:51.968276Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710769, pathId: [OwnerId: 72057594046678944, 
LocalPathId: 6], version: 18446744073709551615 2025-04-17T17:12:51.968308Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-04-17T17:12:51.968397Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710769, subscribers: 1 2025-04-17T17:12:51.968438Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:123:2149] 2025-04-17T17:12:51.975905Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710769 2025-04-17T17:12:51.976437Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710769 2025-04-17T17:12:51.976547Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710769 2025-04-17T17:12:51.976629Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710769 2025-04-17T17:12:51.978471Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T17:12:51.978541Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [4:936:2864] TestWaitNotification: OK eventTxId 104 |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed >> DataShardReadIterator::ShouldReadKeyCellVec |75.5%| [TA] $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} >> TReplicationTests::CreateInParallel [GOOD] >> TReplicationTests::CreateDropRecreate |75.5%| [TA] $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpBatchDelete::Large_2 [GOOD] |75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |75.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |75.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest |75.5%| [TA] {RESULT} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |75.5%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TContinuousBackupTests::Basic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_2 [GOOD] Test command err: Trying to start YDB, gRPC: 17505, MsgBus: 25961 2025-04-17T17:11:38.530261Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494330950560871307:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:38.530330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002465/r3tmp/tmprQ3qmg/pdisk_1.dat 2025-04-17T17:11:39.638851Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:39.647462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:39.647540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:39.658670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:11:39.660099Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 17505, node 1 2025-04-17T17:11:40.009439Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:40.009460Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:40.009695Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:40.010003Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25961 TClient is connected to server localhost:25961 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:11:42.214710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:11:42.229418Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:11:42.240628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:42.385369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:42.572825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:42.654575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:43.540307Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494330950560871307:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:43.540379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:11:48.290119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330993510546087:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:48.290247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:49.594783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:11:49.858443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:11:50.603643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:11:50.827264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:11:51.024450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:11:51.279058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:11:51.483401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:11:51.904386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331006395448693:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:51.904464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:51.906291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331006395448699:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:51.931838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:11:52.358298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331006395448701:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:11:52.478754Z node 1 :TX_PROXY ERROR: Actor# [1:7494331010690416074:3624] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:11:54.589210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:54.589504Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:56.562830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:12:12.265816Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2brvmd8acznagngkgtk4c2", SessionId: ydb://session/3?node_id=1&id=OTc3YjM4MjAtZDIyNTIzOGQtNTU3MTcxYzMtN2IxNmRkZjA=, Slow query, duration: 10.331997s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n BATCH DELETE FROM LargeTable\n WHERE Key >= 2000;\n ", parameters: 0b Trying to start YDB, gRPC: 1723, MsgBus: 64846 2025-04-17T17:12:20.295169Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494331128213689164:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:20.295213Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002465/r3tmp/tmpMWlzZa/pdisk_1.dat 2025-04-17T17:12:20.908855Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:21.016886Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:12:21.017384Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:12:21.026815Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1723, node 2 2025-04-17T17:12:21.954710Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:12:21.954729Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:12:21.954739Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:12:21.955304Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64846 2025-04-17T17:12:25.296207Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494331128213689164:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:25.296280Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:64846 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:12:30.193870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:12:30.209830Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:12:30.322652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:12:31.013515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:12:32.764147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:12:33.690008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:12:35.844858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:12:35.844881Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:38.020244Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494331201228135017:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:38.020534Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:38.420449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:12:38.452699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:12:38.597656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:12:38.771972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:12:39.470802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:12:39.605847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:12:39.742609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:12:40.604513Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494331214113037637:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:40.604632Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:40.607443Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494331214113037644:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:40.628579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:12:40.848438Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494331214113037646:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:12:40.942135Z node 2 :TX_PROXY ERROR: Actor# [2:7494331214113037712:3637] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:12:46.250617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> DataShardReadIterator::ShouldReverseReadMultipleKeys >> DataShardReadIterator::ShouldReadRangeCellVec >> DataShardReadIteratorBatchMode::SelectingColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::Basic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:12:54.017388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:12:54.017480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:12:54.017520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:12:54.017553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:12:54.017594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:12:54.017632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:12:54.017689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:12:54.017764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:12:54.018085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:12:54.104021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:12:54.104070Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:54.118406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:12:54.118897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:12:54.119063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:12:54.131554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:12:54.131762Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:12:54.132480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:54.132712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:12:54.135708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:54.137129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:54.137199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:54.137403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:12:54.137481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:54.137531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:12:54.137670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:12:54.148750Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:12:54.294290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:12:54.294546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:54.294765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:12:54.295014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:12:54.295073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:54.301047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:54.301199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:12:54.301411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:54.301471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:12:54.301506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:12:54.301542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 
-> 3 2025-04-17T17:12:54.308394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:54.308473Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:12:54.308506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:12:54.312201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:54.312261Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:54.312302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:54.312351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:12:54.315977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:12:54.321145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:12:54.321406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:12:54.322542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:54.322736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:12:54.322789Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:54.323224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:12:54.323281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:54.323437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:12:54.323487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:12:54.325497Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:54.325531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:54.325680Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:54.325728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:12:54.326010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:54.326044Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:12:54.326107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:12:54.326135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:54.326169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:12:54.326194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:54.326245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:12:54.326286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:54.326321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:12:54.326343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:12:54.326389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:12:54.326414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:12:54.326436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:12:54.328778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:12:54.328891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:12:54.328919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
04, ready parts: 2/3, is published: true 2025-04-17T17:12:55.596303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:12:55.596353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:12:55.596376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-17T17:12:55.596394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2025-04-17T17:12:55.598532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T17:12:55.598591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T17:12:55.600404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:12:55.600475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:12:55.600511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:12:55.600568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:12:55.600589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:12:55.621616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 3371 } } 2025-04-17T17:12:55.621660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-04-17T17:12:55.621772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 3371 } } 2025-04-17T17:12:55.621842Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 3371 } } FAKE_COORDINATOR: Erasing txId 104 2025-04-17T17:12:55.622708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-17T17:12:55.622753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 
72075186233409546, partId: 0 2025-04-17T17:12:55.622849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-17T17:12:55.622889Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T17:12:55.622950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-17T17:12:55.623003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:55.623036Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:12:55.623062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T17:12:55.623090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-04-17T17:12:55.624933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:12:55.625231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:12:55.625436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:12:55.625462Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-17T17:12:55.625534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2025-04-17T17:12:55.625561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-17T17:12:55.625589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2025-04-17T17:12:55.625611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-17T17:12:55.625642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2025-04-17T17:12:55.625690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 104 2025-04-17T17:12:55.625729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-17T17:12:55.625768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-17T17:12:55.625791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-17T17:12:55.625886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:12:55.625914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2025-04-17T17:12:55.625930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2025-04-17T17:12:55.625954Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:12:55.625971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2025-04-17T17:12:55.625983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2025-04-17T17:12:55.626019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-17T17:12:55.626259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:12:55.626289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-17T17:12:55.626350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T17:12:55.626383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T17:12:55.626406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:12:55.627917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T17:12:55.627961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:727:2642] 2025-04-17T17:12:55.628038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2025-04-17T17:12:55.628498Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T17:12:55.628677Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl" took 196us result status StatusPathDoesNotExist 2025-04-17T17:12:55.628795Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/Table/continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:12:55.629120Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false 
ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T17:12:55.629241Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 129us result status StatusPathDoesNotExist 2025-04-17T17:12:55.629336Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> ResourcePoolsDdl::TestCreateResourcePoolOnServerless [GOOD]
>> ResourcePoolsDdl::TestDefaultPoolRestrictions
>> TReplicationTests::CreateDropRecreate [GOOD]
>> TReplicationTests::CreateWithoutCredentials
>> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD]
>> TContinuousBackupTests::TakeIncrementalBackup [GOOD]
>> DataShardTxOrder::RandomDotRanges_DelayRS
>> DataShardTxOrder::ZigZag_oo8_dirty
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::TakeIncrementalBackup [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:12:54.093117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:12:54.093200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:12:54.093238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:12:54.093269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:12:54.093305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:12:54.093333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:12:54.093386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:12:54.093473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:12:54.093774Z node 1
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:12:54.190219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:12:54.190271Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:54.209502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:12:54.209656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:12:54.209817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:12:54.229713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:12:54.230105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:12:54.230828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:54.231149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:12:54.249095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:54.250498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:54.250569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:54.250700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:12:54.250758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:54.250800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:12:54.250987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:12:54.261169Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:12:54.561663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:12:54.561927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:54.562138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:12:54.562419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:12:54.562474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:54.569193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:54.569342Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:12:54.569545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:54.569619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:12:54.569673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:12:54.569708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:12:54.577030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:54.577105Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:12:54.577143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:12:54.585042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:54.585110Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:54.585158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:54.585218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:12:54.588920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:12:54.596987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:12:54.597223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:12:54.598225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:54.598376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:12:54.598424Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:54.598707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:12:54.598786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:54.598956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:12:54.599038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:12:54.604690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:54.604745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:54.604961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:54.605011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:12:54.605381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:54.605436Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:12:54.605532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:12:54.605574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:54.605631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:12:54.605666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:54.605716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:12:54.605762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:54.605802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:12:54.605835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:12:54.605905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:12:54.605954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:12:54.605988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:12:54.608061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:12:54.610750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:12:54.610806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
409546, at schemeshard: 72057594046678944 2025-04-17T17:12:59.252796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:1 129 -> 240 2025-04-17T17:12:59.281271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2025-04-17T17:12:59.283040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2025-04-17T17:12:59.283916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:1, at schemeshard: 72057594046678944 2025-04-17T17:12:59.283950Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:1 ProgressState 2025-04-17T17:12:59.289118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:1 progress is 4/4 2025-04-17T17:12:59.289144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-04-17T17:12:59.289470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:1 progress is 4/4 2025-04-17T17:12:59.289489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-04-17T17:12:59.289511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/4, is published: true 2025-04-17T17:12:59.289866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2317] message: TxId: 103 2025-04-17T17:12:59.290520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-04-17T17:12:59.291193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T17:12:59.291514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T17:12:59.291849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T17:12:59.298582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-04-17T17:12:59.298618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-04-17T17:12:59.299000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:12:59.299324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2025-04-17T17:12:59.299973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2025-04-17T17:12:59.300009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-17T17:12:59.300027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:3 2025-04-17T17:12:59.300431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:3 2025-04-17T17:12:59.300470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-17T17:12:59.345998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:12:59.346368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:730:2634] TestWaitNotification: OK eventTxId 103 2025-04-17T17:12:59.384679Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T17:12:59.387708Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 2.04ms result status StatusSuccess 2025-04-17T17:12:59.390865Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:12:59.406659Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T17:12:59.414047Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 7.08ms result status StatusSuccess 2025-04-17T17:12:59.420756Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathDescription { 
Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409548 } PersQueueGroup { Name: "streamImpl" PathId: 4 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "continuousBackupImpl" TopicPath: "/MyRoot/Table/continuousBackupImpl/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 4 } MeteringMode: METERING_MODE_REQUEST_UNITS OffloadConfig { IncrementalBackup { DstPath: "/MyRoot/IncrBackupImpl" DstPathId { OwnerId: 72057594046678944 LocalId: 5 } } } } Partitions { PartitionId: 0 TabletId: 72075186233409547 Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409548 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:12:59.427399Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T17:12:59.427668Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 242us result status StatusSuccess 2025-04-17T17:12:59.428071Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 
LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD]
>> TGRpcCmsTest::DescribeOptionsTest [GOOD]
>> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply
>> TReplicationTests::CreateWithoutCredentials [GOOD]
>> TReplicationTests::Describe
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD]
Test command err:
Leader for TabletID 9437184 is [0:0:0] sender: [1:113:2057] recipient: [1:107:2139] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:113:2057] recipient: [1:107:2139] Leader for TabletID 9437184 is [1:126:2151] sender: [1:129:2057] recipient: [1:107:2139] 2025-04-17T17:10:22.327795Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:10:22.351164Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:10:22.351559Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T17:10:22.351789Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:10:22.393769Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:10:22.814714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:10:22.815120Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:22.883880Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:10:22.894121Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:10:22.914738Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T17:10:22.915154Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T17:10:22.915758Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T17:10:22.921034Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:10:22.921574Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:10:22.922009Z node 1 :TX_DATASHARD DEBUG: DataShard
9437184 persisting started state actor id [1:195:2151] in generation 2 Leader for TabletID 9437184 is [1:126:2151] sender: [1:211:2057] recipient: [1:14:2061] 2025-04-17T17:10:23.245017Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:10:23.316121Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T17:10:23.317720Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:10:23.318671Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:216:2214] 2025-04-17T17:10:23.319402Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T17:10:23.319974Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T17:10:23.320369Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:10:23.323753Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:23.324626Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:23.325601Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T17:10:23.326969Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T17:10:23.327701Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:10:23.328502Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:10:23.329069Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T17:10:23.329578Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T17:10:23.329791Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T17:10:23.330328Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T17:10:23.330529Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:10:23.331488Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:212:2211], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:23.331688Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:23.332261Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:210:2210], serverId# [1:212:2211], sessionId# [0:0:0] 2025-04-17T17:10:23.356903Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T17:10:23.356966Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:10:23.357038Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:10:23.357195Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 
2025-04-17T17:10:23.357239Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T17:10:23.357301Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T17:10:23.357852Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:10:23.358438Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T17:10:23.358661Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T17:10:23.358710Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:10:23.358958Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T17:10:23.358982Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T17:10:23.359009Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T17:10:23.359046Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:10:23.359088Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T17:10:23.359108Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T17:10:23.359138Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T17:10:23.359177Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T17:10:23.359205Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T17:10:23.385094Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T17:10:23.385451Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:10:23.385852Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:10:23.385897Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T17:10:23.385981Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T17:10:23.386516Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:222:2220], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:23.386564Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:10:23.386623Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:221:2219], serverId# [1:222:2220], sessionId# [0:0:0] 2025-04-17T17:10:23.386761Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-04-17T17:10:23.386790Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:10:23.386948Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-04-17T17:10:23.386992Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-17T17:10:23.387034Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 
2025-04-17T17:10:23.387061Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-04-17T17:10:23.404660Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-04-17T17:10:23.405390Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:10:23.407219Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:23.407250Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:10:23.407633Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:10:23.407825Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:10:23.408265Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T17:10:23.408460Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-04-17T17:10:23.408861Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-04-17T17:10:23.409094Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-17T17:10:23.409502Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-04-17T17:10:23.409889Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T17:10:23.410434Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-17T17:10:23.411583Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-04-17T17:10:23.411799Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-17T17:10:23.412003Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T17:10:23.412020Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T17:10:23.412035Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T17:10:23.412310Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:10:23.412334Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T17:10:23.412362Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T17:10:23.412739Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T17:10:23.412772Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2025-04-17T17:10:23.413103Z node 1 :TX_DATASHARD TRAC ... 
7:12:58.744066Z node 34 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is DelayComplete 2025-04-17T17:12:58.744087Z node 34 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit CompleteOperation 2025-04-17T17:12:58.748340Z node 34 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit CompletedOperations 2025-04-17T17:12:58.748394Z node 34 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit CompletedOperations 2025-04-17T17:12:58.748438Z node 34 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-17T17:12:58.748467Z node 34 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit CompletedOperations 2025-04-17T17:12:58.748494Z node 34 :TX_DATASHARD TRACE: Execution plan for [7:6] at 9437186 has finished 2025-04-17T17:12:58.748523Z node 34 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:12:58.748548Z node 34 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-04-17T17:12:58.748578Z node 34 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2025-04-17T17:12:58.748607Z node 34 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-04-17T17:12:58.748766Z node 34 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [34:99:2134], Recipient [34:347:2314]: {TEvPlanStep step# 7 MediatorId# 0 TabletID 9437185} 2025-04-17T17:12:58.748810Z node 34 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:12:58.748876Z node 34 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit WaitForPlan 2025-04-17T17:12:58.748903Z node 34 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-04-17T17:12:58.748928Z node 34 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit WaitForPlan 2025-04-17T17:12:58.748952Z node 34 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit PlanQueue 2025-04-17T17:12:58.749060Z node 34 :TX_DATASHARD DEBUG: Planned transaction txId 6 at step 7 at tablet 9437185 { Transactions { TxId: 6 AckTo { RawX1: 99 RawX2: 146028890198 } } Step: 7 MediatorID: 0 TabletID: 9437185 } 2025-04-17T17:12:58.749083Z node 34 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-17T17:12:58.749198Z node 34 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [34:347:2314], Recipient [34:347:2314]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:12:58.749223Z node 34 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:12:58.749254Z node 34 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-04-17T17:12:58.749277Z node 34 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:12:58.749297Z node 34 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-17T17:12:58.749322Z node 34 :TX_DATASHARD DEBUG: Found ready operation [7:6] in PlanQueue unit at 9437185 2025-04-17T17:12:58.749357Z node 34 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit PlanQueue 2025-04-17T17:12:58.749381Z node 34 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-04-17T17:12:58.749401Z node 34 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit PlanQueue 2025-04-17T17:12:58.749423Z node 34 :TX_DATASHARD 
TRACE: Add [7:6] at 9437185 to execution unit LoadTxDetails 2025-04-17T17:12:58.749446Z node 34 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit LoadTxDetails 2025-04-17T17:12:58.751090Z node 34 :TX_DATASHARD DEBUG: LoadTxDetails at 9437185 loaded tx from db 7:6 keys extracted: 1 2025-04-17T17:12:58.751125Z node 34 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-04-17T17:12:58.751148Z node 34 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit LoadTxDetails 2025-04-17T17:12:58.751169Z node 34 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit FinalizeDataTxPlan 2025-04-17T17:12:58.751190Z node 34 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit FinalizeDataTxPlan 2025-04-17T17:12:58.751229Z node 34 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-04-17T17:12:58.751252Z node 34 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit FinalizeDataTxPlan 2025-04-17T17:12:58.751272Z node 34 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit BuildAndWaitDependencies 2025-04-17T17:12:58.751291Z node 34 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit BuildAndWaitDependencies 2025-04-17T17:12:58.751324Z node 34 :TX_DATASHARD TRACE: Operation [7:6] is the new logically complete end at 9437185 2025-04-17T17:12:58.751349Z node 34 :TX_DATASHARD TRACE: Operation [7:6] is the new logically incomplete end at 9437185 2025-04-17T17:12:58.751371Z node 34 :TX_DATASHARD TRACE: Activated operation [7:6] at 9437185 2025-04-17T17:12:58.751399Z node 34 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-04-17T17:12:58.751419Z node 34 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit BuildAndWaitDependencies 2025-04-17T17:12:58.751438Z node 34 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit BuildDataTxOutRS 2025-04-17T17:12:58.751456Z node 34 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit BuildDataTxOutRS 2025-04-17T17:12:58.751489Z node 34 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-04-17T17:12:58.751508Z node 34 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit BuildDataTxOutRS 2025-04-17T17:12:58.751528Z node 34 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit StoreAndSendOutRS 2025-04-17T17:12:58.751548Z node 34 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit StoreAndSendOutRS 2025-04-17T17:12:58.751570Z node 34 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-04-17T17:12:58.751589Z node 34 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit StoreAndSendOutRS 2025-04-17T17:12:58.751608Z node 34 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit PrepareDataTxInRS 2025-04-17T17:12:58.751627Z node 34 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit PrepareDataTxInRS 2025-04-17T17:12:58.751648Z node 34 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-04-17T17:12:58.751680Z node 34 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit PrepareDataTxInRS 2025-04-17T17:12:58.751699Z node 34 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit LoadAndWaitInRS 2025-04-17T17:12:58.751720Z node 34 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit LoadAndWaitInRS 2025-04-17T17:12:58.751739Z node 34 :TX_DATASHARD TRACE: 
Execution status for [7:6] at 9437185 is Executed 2025-04-17T17:12:58.751761Z node 34 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit LoadAndWaitInRS 2025-04-17T17:12:58.751780Z node 34 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit ExecuteDataTx 2025-04-17T17:12:58.751798Z node 34 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit ExecuteDataTx 2025-04-17T17:12:58.752006Z node 34 :TX_DATASHARD TRACE: Executed operation [7:6] at tablet 9437185 with status COMPLETE 2025-04-17T17:12:58.752046Z node 34 :TX_DATASHARD TRACE: Datashard execution counters for [7:6] at 9437185: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 10, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T17:12:58.752079Z node 34 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-04-17T17:12:58.752098Z node 34 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit ExecuteDataTx 2025-04-17T17:12:58.752131Z node 34 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit CompleteOperation 2025-04-17T17:12:58.752152Z node 34 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit CompleteOperation 2025-04-17T17:12:58.752284Z node 34 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is DelayComplete 2025-04-17T17:12:58.752307Z node 34 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit CompleteOperation 2025-04-17T17:12:58.752331Z node 34 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit CompletedOperations 2025-04-17T17:12:58.761130Z node 34 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit CompletedOperations 2025-04-17T17:12:58.761209Z node 34 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-04-17T17:12:58.761239Z node 34 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit CompletedOperations 2025-04-17T17:12:58.761267Z node 34 :TX_DATASHARD TRACE: Execution plan for [7:6] at 9437185 has finished 2025-04-17T17:12:58.761298Z node 34 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:12:58.761327Z node 34 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-17T17:12:58.761352Z node 34 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-17T17:12:58.761378Z node 34 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-17T17:12:58.785043Z node 34 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 7 txid# 6} 2025-04-17T17:12:58.785105Z node 34 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 7} 2025-04-17T17:12:58.785155Z node 34 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-17T17:12:58.785191Z node 34 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437185 on unit CompleteOperation 2025-04-17T17:12:58.785257Z node 34 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437185 at tablet 9437185 send result to client [34:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T17:12:58.785299Z node 34 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-17T17:12:58.785530Z node 34 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 7 txid# 6} 2025-04-17T17:12:58.785559Z node 34 :TX_DATASHARD 
DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 7} 2025-04-17T17:12:58.785593Z node 34 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:12:58.785618Z node 34 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437184 on unit CompleteOperation 2025-04-17T17:12:58.785655Z node 34 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437184 at tablet 9437184 send result to client [34:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T17:12:58.785682Z node 34 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:12:58.785813Z node 34 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437186 step# 7 txid# 6} 2025-04-17T17:12:58.785837Z node 34 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437186 step# 7} 2025-04-17T17:12:58.785865Z node 34 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T17:12:58.785887Z node 34 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437186 on unit CompleteOperation 2025-04-17T17:12:58.785925Z node 34 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437186 at tablet 9437186 send result to client [34:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T17:12:58.785951Z node 34 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186
|75.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log}
>> TopicAutoscaling::CommitTopPast [GOOD]
>> TopicAutoscaling::ControlPlane_BackCompatibility
|75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes
|75.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes
|75.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log}
|75.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes
------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DescribeOptionsTest [GOOD]
Test command err:
2025-04-17T17:12:50.906000Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331257117454557:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:50.906512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0021b4/r3tmp/tmpkJFtts/pdisk_1.dat 2025-04-17T17:12:51.796961Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:51.918069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:12:51.918179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:12:51.920507Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:51.930178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6670, node 1 2025-04-17T17:12:52.128769Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:12:52.128802Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:12:52.128810Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:12:52.128933Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61529 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:12:53.195459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:12:53.238516Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:61529 2025-04-17T17:12:53.476775Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now locking 2025-04-17T17:12:53.476823Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now locked by parent 2025-04-17T17:12:53.484825Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now active 2025-04-17T17:12:53.535225Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285140, Sender [1:7494331270002357096:2316], Recipient [1:7494331261412422171:2201]: NKikimr::NConsole::TEvConsole::TEvDescribeTenantOptionsRequest { Request { } UserToken: "" } 2025-04-17T17:12:53.535283Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvDescribeTenantOptionsRequest 2025-04-17T17:12:53.537838Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvDescribeTenantOptionsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.DescribeDatabaseOptionsResult] { storage_units { kind: "hdd2" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd1" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "ssd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "test" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } availability_zones { name: "dc-1" labels { key: "collocation" value: "disabled" } labels { key: "fixed_data_center" value: "DC-1" } } availability_zones { name: "any" labels { key: "any_data_center" value: "true" } labels { key: "collocation" value: "disabled" } } computational_units { kind: "slot" labels { key: "slot_type" value: "default" } labels { key: "type" value: "dynamic_slot" } allowed_availability_zones: "any" allowed_availability_zones: "dc-1" } } } } }
>> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd]
>> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite
>> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate
>> DataShardTxOrder::ImmediateBetweenOnline_oo8
>> DataShardTxOrder::ForceOnlineBetweenOnline_oo8
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-17T17:12:16.788461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:12:16.788525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:12:16.788552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T17:12:16.788578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:12:16.788662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:12:16.788685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:12:16.788733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:12:16.788812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:12:16.789052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:12:18.345005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:12:18.345534Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:18.584899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:12:18.586036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:12:18.587631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:12:18.807730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:12:18.834182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:12:18.860267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:18.860518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:12:18.933507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:18.955114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:18.955754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:18.969529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:12:18.969889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:18.970250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:12:18.985200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:12:19.163291Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-04-17T17:12:20.435066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:12:20.437166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, 
path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:20.444377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:12:20.444569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:12:20.444622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:20.459117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:20.462105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:12:20.463036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:20.463102Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:12:20.463134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:12:20.463158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:12:20.464991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:20.465052Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:12:20.465094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:12:20.466571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:20.466607Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:20.466664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:20.466715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:12:20.469773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:12:20.497322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:12:20.499108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:12:20.505978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at 
schemeshard: 72057594046678944 2025-04-17T17:12:20.507297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:12:20.507953Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:20.510607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:12:20.511169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:20.512371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:12:20.513284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:12:20.530272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:20.530330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:20.530466Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:20.530501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:12:20.532518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:20.533100Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:12:20.535455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:12:20.535899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:20.536575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:12:20.536819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:20.537340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:12:20.537567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:20.537828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:12:20.538316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:12:20.538571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:12:20.539288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:12:20.539705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:12:20.551579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:12:20.551811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:12:20.551845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 94046678944 2025-04-17T17:13:02.608415Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-04-17T17:13:02.609016Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2025-04-17T17:13:02.610998Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:13:02.616117Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:13:02.616207Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:13:02.616239Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-17T17:13:02.616574Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-04-17T17:13:02.616609Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T17:13:02.622121Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:13:02.623169Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:13:02.623198Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-17T17:13:02.623225Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2025-04-17T17:13:02.623704Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T17:13:02.623775Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-04-17T17:13:02.644150Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:13:02.644306Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-17T17:13:02.644679Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-04-17T17:13:02.644993Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-17T17:13:02.645430Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-04-17T17:13:02.645918Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000009 2025-04-17T17:13:02.647887Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:02.647994Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:13:02.648435Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000009, at schemeshard: 72057594046678944 2025-04-17T17:13:02.648899Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-04-17T17:13:02.649893Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-04-17T17:13:02.650674Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-17T17:13:02.651376Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-04-17T17:13:02.659244Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-17T17:13:02.659357Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:13:02.659779Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-17T17:13:02.668970Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-04-17T17:13:02.669081Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-17T17:13:02.669442Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2025-04-17T17:13:02.669853Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2025-04-17T17:13:02.670610Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T17:13:02.670649Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-04-17T17:13:02.670961Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-04-17T17:13:02.671371Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-04-17T17:13:02.673257Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-17T17:13:02.673953Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-17T17:13:02.723749Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:13:02.724087Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:13:02.724262Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-17T17:13:02.724695Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:02.725313Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:208:2210], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-04-17T17:13:02.725356Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:208:2210], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-04-17T17:13:02.744004Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:13:02.745461Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:13:02.745514Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-17T17:13:02.746438Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-04-17T17:13:02.746796Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T17:13:02.749124Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:13:02.749899Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:13:02.750323Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-17T17:13:02.750350Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-17T17:13:02.750378Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-17T17:13:02.750858Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-04-17T17:13:02.750905Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:124:2150] 2025-04-17T17:13:02.841811Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-17T17:13:02.843160Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-17T17:13:02.843268Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-04-17T17:13:02.843334Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-04-17T17:13:02.843383Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:13:02.843410Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-04-17T17:13:02.843451Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-04-17T17:13:02.854861Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:13:02.854969Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:13:02.855020Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:775:2710] TestWaitNotification: OK eventTxId 103
>> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink
>> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::PartitionSplit_ManySession_PQv1
>> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW
>> TReplicationTests::Describe [GOOD]
>> TReplicationTests::CreateReplicatedTable
|75.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream
|75.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream
|75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream
>> TopicAutoscaling::Simple_AutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK
>> Balancing::Balancing_OneTopic_TopicApi [GOOD]
>> Balancing::Balancing_OneTopic_PQv1
>> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::Simple_PQv1
>> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK
>> SystemView::GroupsFields [GOOD]
>> SystemView::Describe
>> DataShardTxOrder::ImmediateBetweenOnline_Init
>> DataShardOutOfOrder::TestReadTableImmediateWriteBlock
>> TopicAutoscaling::PartitionSplit_PQv1 [GOOD]
>> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK
>> YdbSdkSessionsPool::CustomPlan
>> DataShardReadIterator::ShouldReceiveErrorAfterSplit [GOOD]
>> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted
>> ResourcePoolsDdl::TestDefaultPoolRestrictions [GOOD]
>> ResourcePoolsDdl::TestAlterResourcePool
>> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD]
>> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive [GOOD]
>> DataShardReadIterator::ShouldNotReadAfterCancel
>> DataShardReadIterator::ShouldReadKeyCellVec [GOOD]
>> DataShardReadIterator::ShouldReadKeyArrow
>> DataShardReadIterator::ShouldReverseReadMultipleKeys [GOOD]
>> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne
>> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK
>> TReplicationTests::CreateReplicatedTable [GOOD]
>> TReplicationTests::DropReplicationWithInvalidCredentials
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:13:11.348741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:13:11.348823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:13:11.348864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:13:11.348894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:13:11.348941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:13:11.348962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:13:11.349003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:13:11.352801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:13:11.357099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:11.533910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:13:11.533963Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:11.569809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:11.570239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:13:11.570379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:13:11.592310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:13:11.592481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:13:11.593107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:11.593317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId:
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:13:11.596011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:11.597228Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:13:11.597298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:11.597463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:13:11.597500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:13:11.597538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:13:11.597652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:13:11.604077Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:13:11.769925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:13:11.770160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:11.770389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:13:11.770625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:13:11.770679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:11.773107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:11.773247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:13:11.773434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:11.773480Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:13:11.773514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:13:11.773570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:13:11.775752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:11.775811Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 
2025-04-17T17:13:11.775845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:13:11.777819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:11.777866Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:11.777904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:11.777952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:13:11.788368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:13:11.790500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:13:11.790735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:13:11.791683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:11.791840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:13:11.791890Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:11.792181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:13:11.792243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:11.792407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:13:11.792502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:13:11.794567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:13:11.794608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:13:11.794842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:11.794918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:13:11.795255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:11.795299Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:13:11.795403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:13:11.795437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:11.795470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:13:11.795498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:11.795542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:13:11.795587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:11.795618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:13:11.795644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:13:11.795708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:13:11.795756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:13:11.795799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:13:11.804722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:13:11.804885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:13:11.804951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
d: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:13:12.579381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:12.579430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-17T17:13:12.579974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:13:12.580056Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:13:12.580748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:13:12.580837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:13:12.580865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:13:12.580904Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T17:13:12.580958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T17:13:12.581031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:18069 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4CF8BC2B-31CF-4D71-86F4-7676147D4DFF amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 REQUEST: PUT /data_01.csv.zst HTTP/1.1 HEADERS: Host: localhost:18069 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A7362CE1-B567-4BCB-B046-A95E2D989041 amz-sdk-request: attempt=1 content-length: 20 content-md5: 8NOHH1ycwPXC5K+v+37u8g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_01.csv.zst / / 20 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T17:13:12.600477Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:481:2440], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2025-04-17T17:13:12.600591Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:483:2441], result# PutObjectResult { ETag: f0d3871f5c9cc0f5c2e4afaffb7eeef2 } 2025-04-17T17:13:12.600633Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:483:2441], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-17T17:13:12.600904Z 
node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:480:2438] 2025-04-17T17:13:12.601053Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:482:2439], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-17T17:13:12.601318Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:481:2440], sender# [1:480:2438], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-04-17T17:13:12.618954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:18069 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F3C19D13-7B18-4A65-B9D1-443B0DFC4DAD amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2025-04-17T17:13:12.622546Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:481:2440], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2025-04-17T17:13:12.622612Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:481:2440], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-17T17:13:12.632835Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:480:2438], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-17T17:13:12.647272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 320 RawX2: 4294969600 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:13:12.647335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-17T17:13:12.647491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 320 RawX2: 4294969600 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:13:12.647581Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 320 RawX2: 4294969600 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:13:12.647640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:12.647802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 
72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:13:12.648525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 324 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:13:12.648577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-17T17:13:12.648698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 324 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:13:12.648772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 324 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:13:12.648809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:12.648849Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:13:12.648884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T17:13:12.648921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-17T17:13:12.648944Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-17T17:13:12.649072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:13:12.662643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:13:12.662974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:13:12.663417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:13:12.663468Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:13:12.663561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:13:12.663594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:13:12.663634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:13:12.663666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:13:12.663701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T17:13:12.663789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:372:2339] message: TxId: 102 2025-04-17T17:13:12.663847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:13:12.663884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:13:12.663916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:13:12.664050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:13:12.677398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:13:12.677460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:456:2416]
TestWaitNotification: OK eventTxId 102
>> SystemView::AuthGroups_ResultOrder [GOOD]
>> SystemView::AuthGroups_TableRange
>> DataShardReadIterator::ShouldReadRangeCellVec [GOOD]
>> DataShardReadIterator::ShouldReadRangeArrow
>> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit
|75.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat
|75.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat
|75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat
>> TReplicationTests::DropReplicationWithInvalidCredentials [GOOD]
>> TReplicationTests::DropReplicationWithUnknownSecret
>> DataShardReadIteratorBatchMode::SelectingColumns [GOOD]
>> DataShardReadIteratorBatchMode::ShouldHandleReadAck
|75.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut
|75.6%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut
|75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut
>> YdbSdkSessionsPool::WaitQueue1
>> SystemView::PartitionStatsOneSchemeShardDataQuery [GOOD]
>> SystemView::PgTablesOneSchemeShardDataQuery
>> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD]
>> ShowCreateView::WithTwoTablePathPrefixes [GOOD]
>> SystemView::AuthGroups
>> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate [GOOD]
>> DataShardOutOfOrder::TestShardRestartDuringWaitingRead
>> TReplicationTests::DropReplicationWithUnknownSecret [GOOD]
>> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD]
>> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite
>> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD]
>> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder
>> SystemView::ShowCreateTablePartitionByHash [GOOD]
>> SystemView::ShowCreateTablePartitionSettings
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD]
Test command err: 2025-04-17T17:13:09.721420Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:13:09.745016Z node 1
:TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:13:09.745384Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T17:13:09.745559Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:09.790545Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:13:09.796169Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:09.881625Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:13:09.883263Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T17:13:09.883354Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T17:13:09.883453Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T17:13:09.883794Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:13:09.884328Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:13:09.884403Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T17:13:09.884756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:13:09.884844Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:09.955365Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:13:09.993977Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T17:13:09.994170Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:13:09.994273Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T17:13:09.994315Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T17:13:09.994361Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T17:13:09.994400Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:13:09.994546Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:09.994596Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:09.994881Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T17:13:09.994975Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T17:13:09.995097Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:13:09.995134Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:13:09.995184Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T17:13:09.995229Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T17:13:09.995277Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T17:13:09.995311Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T17:13:09.995369Z node 1 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 9437184 2025-04-17T17:13:09.995473Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:09.995507Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:09.995554Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T17:13:10.004556Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T17:13:10.004634Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:13:10.004746Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:13:10.004885Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T17:13:10.004925Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T17:13:10.004993Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T17:13:10.005058Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:13:10.005092Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T17:13:10.005137Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T17:13:10.005180Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:13:10.005489Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T17:13:10.005520Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T17:13:10.005548Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T17:13:10.005578Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:13:10.005623Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T17:13:10.005655Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T17:13:10.005690Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T17:13:10.005718Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T17:13:10.005742Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T17:13:10.020201Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T17:13:10.020278Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:13:10.020313Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:13:10.020352Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 
9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T17:13:10.020410Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T17:13:10.021025Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:10.021080Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:10.021120Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T17:13:10.021195Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T17:13:10.021238Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:13:10.021377Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T17:13:10.021415Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:13:10.021469Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T17:13:10.021502Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T17:13:10.036817Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T17:13:10.036911Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:13:10.037169Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:10.037218Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:10.037285Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:13:10.037324Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:13:10.037357Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T17:13:10.037417Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T17:13:10.037453Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T17:13:10.037506Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:13:10.037542Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T17:13:10.037594Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T17:13:10.037629Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T17:13:10.037813Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T17:13:10.037851Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:13:10.037877Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 
2025-04-17T17:13:10.037900Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T17:13:10.037924Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T17:13:10.037996Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:13:10.038019Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T17:13:10.038057Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T17:13:10.038099Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T17:13:10.038161Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T17:13:10.038203Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T17:13:10.038236Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T17:13:10.038291Z node 1 :TX_DATA ... m 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T17:13:17.304682Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-17T17:13:17.304709Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T17:13:17.304790Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T17:13:17.304812Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:137] at 9437186 on unit CompleteOperation 2025-04-17T17:13:17.304841Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 137] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T17:13:17.304904Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-17T17:13:17.304930Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T17:13:17.305017Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T17:13:17.305056Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2025-04-17T17:13:17.305090Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T17:13:17.305140Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-17T17:13:17.305166Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T17:13:17.305269Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T17:13:17.305292Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2025-04-17T17:13:17.305322Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T17:13:17.305356Z node 1 
:TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-17T17:13:17.305387Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T17:13:17.305499Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T17:13:17.305524Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-04-17T17:13:17.305553Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T17:13:17.305578Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T17:13:17.305664Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T17:13:17.305685Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-04-17T17:13:17.305711Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T17:13:17.305731Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T17:13:17.305808Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T17:13:17.305828Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-17T17:13:17.305879Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T17:13:17.305916Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T17:13:17.306153Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-17T17:13:17.306185Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:17.306216Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-04-17T17:13:17.306327Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-17T17:13:17.306359Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:17.306395Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-17T17:13:17.306470Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-17T17:13:17.306498Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:17.306527Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-17T17:13:17.306585Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], 
Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-17T17:13:17.306615Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:17.306714Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-04-17T17:13:17.306807Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-17T17:13:17.306831Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:17.306850Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-17T17:13:17.306897Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-04-17T17:13:17.306919Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:17.306939Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-17T17:13:17.307007Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-17T17:13:17.307030Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:17.307049Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-17T17:13:17.307112Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-17T17:13:17.307134Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:17.307166Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-17T17:13:17.307236Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-17T17:13:17.307276Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:17.307301Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-04-17T17:13:17.307372Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-17T17:13:17.307411Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:17.307445Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-04-17T17:13:17.307533Z node 1 :TX_DATASHARD TRACE: 
StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-17T17:13:17.307558Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:17.307585Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-17T17:13:17.307639Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-17T17:13:17.307671Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:17.307715Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-17T17:13:17.307818Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-17T17:13:17.307846Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:17.307868Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-17T17:13:17.307935Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-17T17:13:17.307961Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:17.307983Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-17T17:13:17.308026Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-17T17:13:17.308047Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:17.308070Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143
>> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:12:49.964299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:12:49.964426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout#
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:12:49.964468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:12:49.964526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:12:49.964576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:12:49.964603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:12:49.964678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:12:49.964753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:12:49.965080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:12:50.049926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:12:50.049986Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:50.067799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:12:50.067924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:12:50.068065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:12:50.074447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:12:50.074742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:12:50.075419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:50.075718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:12:50.079464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:50.080695Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:50.080765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:50.080883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:12:50.080932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:50.080969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:12:50.081153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:12:50.087941Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:12:50.234766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:12:50.235022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:50.235221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:12:50.235430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:12:50.235481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:50.238126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:50.238247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:12:50.238452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:50.238513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:12:50.238546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:12:50.238591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:12:50.241017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:50.241075Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:12:50.241118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:12:50.243673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:50.243756Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:50.243822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:50.243893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:12:50.247490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:12:50.249921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:12:50.250115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:12:50.251134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:50.251273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:12:50.251322Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:50.251579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:12:50.251633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:50.251808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:12:50.251967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:12:50.257063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:50.257109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:50.257265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:50.257313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:12:50.257590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:50.257624Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:12:50.257694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:12:50.257728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:50.257761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:12:50.257781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:50.257805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:12:50.257834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:50.257858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:12:50.257881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:12:50.257938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:12:50.257963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:12:50.257982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:12:50.259439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:12:50.259542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:12:50.259577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... E: StateWork, received event# 2146435072, Sender [9:124:2150], Recipient [9:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:13:17.660146Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:13:17.660223Z node 9 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:13:17.660260Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:13:17.660389Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:13:17.660549Z node 9 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:17.660583Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [9:205:2207], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-17T17:13:17.660620Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [9:205:2207], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T17:13:17.661102Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:13:17.661154Z node 9 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:13:17.661300Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:13:17.661337Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:13:17.661385Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:13:17.661440Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:13:17.661492Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:13:17.661540Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-17T17:13:17.661599Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:13:17.661655Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:13:17.661705Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:13:17.661860Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:13:17.661914Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, 
publications: 2, subscribers: 0 2025-04-17T17:13:17.661959Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-17T17:13:17.662002Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-17T17:13:17.662850Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [9:205:2207], Recipient [9:124:2150]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 } 2025-04-17T17:13:17.662895Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-17T17:13:17.662970Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:13:17.663045Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:13:17.663080Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:13:17.663140Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T17:13:17.663193Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:13:17.663297Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:13:17.664485Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [9:205:2207], Recipient [9:124:2150]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 } 2025-04-17T17:13:17.664526Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-17T17:13:17.664590Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:13:17.664670Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:13:17.664715Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:13:17.664749Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T17:13:17.664779Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:13:17.664883Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-17T17:13:17.664929Z node 9 :FLAT_TX_SCHEMESHARD 
TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:13:17.666908Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435084, Sender [9:124:2150], Recipient [9:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-04-17T17:13:17.666959Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-04-17T17:13:17.667037Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:13:17.667092Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:13:17.667174Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:13:17.671580Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:13:17.672194Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:13:17.672235Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:13:17.676359Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:13:17.676407Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:13:17.676520Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T17:13:17.676819Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T17:13:17.676871Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T17:13:17.677328Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [9:447:2402], Recipient [9:124:2150]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:17.677395Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:17.677443Z node 9 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-17T17:13:17.677609Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [9:363:2342], Recipient [9:124:2150]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2025-04-17T17:13:17.677650Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-17T17:13:17.677732Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T17:13:17.677846Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:13:17.677897Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [9:445:2400] 2025-04-17T17:13:17.678089Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [9:447:2402], Recipient [9:124:2150]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:13:17.678123Z node 9 :FLAT_TX_SCHEMESHARD 
TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:13:17.678167Z node 9 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-04-17T17:13:17.684843Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [9:448:2403], Recipient [9:124:2150]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-17T17:13:17.684923Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-17T17:13:17.685072Z node 9 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:13:17.685317Z node 9 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 239us result status StatusPathDoesNotExist 2025-04-17T17:13:17.685486Z node 9 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Replication\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Replication" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] >> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite >> DataShardReadIterator::ShouldNotReadAfterCancel [GOOD] >> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100 |75.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |75.6%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |75.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |75.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink |75.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |75.6%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |75.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut >> DataShardReadIterator::ShouldReadKeyArrow [GOOD] >> DataShardReadIterator::ShouldReadKeyOnlyValueColumn >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] Test command err: 2025-04-17T17:13:05.654251Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:13:05.728977Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:13:05.729439Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T17:13:05.729709Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:05.897931Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:13:05.953929Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:06.217116Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:13:06.242708Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T17:13:06.242794Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T17:13:06.242893Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T17:13:06.243229Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:13:06.243698Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:13:06.243764Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T17:13:06.244040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:13:06.244067Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:06.867905Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:13:07.322774Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T17:13:07.323519Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:13:07.331002Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T17:13:07.331643Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T17:13:07.332241Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T17:13:07.332771Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:13:07.334445Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:07.335189Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:07.350612Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T17:13:07.350947Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T17:13:07.351902Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:13:07.361042Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:13:07.361638Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T17:13:07.362266Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 
has no attached operations 2025-04-17T17:13:07.362615Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T17:13:07.362919Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T17:13:07.363240Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:13:07.372619Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:07.372941Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:07.373220Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T17:13:07.425883Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T17:13:07.425958Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:13:07.427486Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:13:07.440540Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T17:13:07.440860Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T17:13:07.441389Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T17:13:07.442134Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:13:07.442669Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T17:13:07.442710Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T17:13:07.442744Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:13:07.452733Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T17:13:07.452795Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T17:13:07.453075Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T17:13:07.453615Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:13:07.453928Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T17:13:07.454201Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T17:13:07.454481Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T17:13:07.454825Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T17:13:07.455090Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T17:13:07.481519Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T17:13:07.481591Z node 1 :TX_DATASHARD TRACE: 
Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:13:07.481640Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:13:07.481676Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T17:13:07.481729Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T17:13:07.482227Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:07.482273Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:07.482309Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T17:13:07.482362Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T17:13:07.482385Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:13:07.482510Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T17:13:07.482548Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:13:07.482592Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T17:13:07.482625Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T17:13:07.498153Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T17:13:07.498239Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:13:07.498514Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:07.498554Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:07.498625Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:13:07.498665Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:13:07.498700Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T17:13:07.498746Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T17:13:07.498782Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T17:13:07.498829Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:13:07.498863Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T17:13:07.498922Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T17:13:07.498960Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T17:13:07.499137Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from 
db 1000001:1 keys extracted: 0 2025-04-17T17:13:07.499174Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:13:07.499200Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T17:13:07.499224Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T17:13:07.499245Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T17:13:07.499341Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:13:07.499367Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T17:13:07.499407Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T17:13:07.499440Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T17:13:07.499490Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T17:13:07.499538Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T17:13:07.499576Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T17:13:07.499615Z node 1 :TX_DATA ... CE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2025-04-17T17:13:19.450681Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-04-17T17:13:19.450704Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2025-04-17T17:13:19.450926Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2025-04-17T17:13:19.450959Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-04-17T17:13:19.451004Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2025-04-17T17:13:19.451037Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-04-17T17:13:19.451066Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-17T17:13:19.451087Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-04-17T17:13:19.451120Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2025-04-17T17:13:19.451161Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:13:19.451196Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T17:13:19.451252Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T17:13:19.451291Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T17:13:19.451544Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [2:99:2134], Recipient [2:343:2310]: {TEvPlanStep step# 1000016 MediatorId# 0 TabletID 9437185} 2025-04-17T17:13:19.451601Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:13:19.451693Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit WaitForPlan 
2025-04-17T17:13:19.451725Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T17:13:19.451752Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit WaitForPlan 2025-04-17T17:13:19.451793Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit PlanQueue 2025-04-17T17:13:19.451924Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 45 at step 1000016 at tablet 9437185 { Transactions { TxId: 45 AckTo { RawX1: 99 RawX2: 8589936726 } } Step: 1000016 MediatorID: 0 TabletID: 9437185 } 2025-04-17T17:13:19.451952Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-17T17:13:19.460407Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:343:2310], Recipient [2:343:2310]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:19.460490Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:19.460558Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-04-17T17:13:19.460608Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:13:19.460639Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-17T17:13:19.460675Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in PlanQueue unit at 9437185 2025-04-17T17:13:19.460705Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit PlanQueue 2025-04-17T17:13:19.460739Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T17:13:19.460769Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PlanQueue 2025-04-17T17:13:19.460815Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadTxDetails 2025-04-17T17:13:19.460855Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadTxDetails 2025-04-17T17:13:19.461551Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437185 loaded tx from db 1000016:45 keys extracted: 2 2025-04-17T17:13:19.461598Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T17:13:19.461626Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadTxDetails 2025-04-17T17:13:19.461672Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit FinalizeDataTxPlan 2025-04-17T17:13:19.461701Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit FinalizeDataTxPlan 2025-04-17T17:13:19.461744Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T17:13:19.461765Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit FinalizeDataTxPlan 2025-04-17T17:13:19.461788Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildAndWaitDependencies 2025-04-17T17:13:19.461810Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildAndWaitDependencies 2025-04-17T17:13:19.461865Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437185 2025-04-17T17:13:19.461896Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437185 2025-04-17T17:13:19.461924Z node 2 :TX_DATASHARD TRACE: Activated operation 
[1000016:45] at 9437185 2025-04-17T17:13:19.461959Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T17:13:19.461983Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildAndWaitDependencies 2025-04-17T17:13:19.462005Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildDataTxOutRS 2025-04-17T17:13:19.462027Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildDataTxOutRS 2025-04-17T17:13:19.462074Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T17:13:19.462095Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildDataTxOutRS 2025-04-17T17:13:19.462115Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit StoreAndSendOutRS 2025-04-17T17:13:19.462138Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit StoreAndSendOutRS 2025-04-17T17:13:19.462159Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T17:13:19.462176Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit StoreAndSendOutRS 2025-04-17T17:13:19.462195Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit PrepareDataTxInRS 2025-04-17T17:13:19.462213Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit PrepareDataTxInRS 2025-04-17T17:13:19.462241Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T17:13:19.462263Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PrepareDataTxInRS 2025-04-17T17:13:19.462282Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadAndWaitInRS 2025-04-17T17:13:19.462300Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadAndWaitInRS 2025-04-17T17:13:19.462339Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T17:13:19.462361Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2025-04-17T17:13:19.462381Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2025-04-17T17:13:19.462400Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2025-04-17T17:13:19.462758Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2025-04-17T17:13:19.462819Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T17:13:19.462867Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T17:13:19.462893Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2025-04-17T17:13:19.462919Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2025-04-17T17:13:19.462942Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2025-04-17T17:13:19.463149Z node 2 :TX_DATASHARD TRACE: Execution status 
for [1000016:45] at 9437185 is DelayComplete 2025-04-17T17:13:19.463174Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2025-04-17T17:13:19.463200Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2025-04-17T17:13:19.463235Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2025-04-17T17:13:19.463263Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T17:13:19.463283Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2025-04-17T17:13:19.463306Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2025-04-17T17:13:19.463333Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:13:19.463355Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-17T17:13:19.463378Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-17T17:13:19.463400Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-17T17:13:19.485331Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-04-17T17:13:19.485432Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-04-17T17:13:19.485496Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:13:19.485541Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-04-17T17:13:19.485613Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T17:13:19.485662Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:13:19.485989Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-04-17T17:13:19.486030Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-04-17T17:13:19.486075Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-17T17:13:19.486101Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-04-17T17:13:19.486139Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T17:13:19.486168Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock [GOOD] >> DataShardOutOfOrder::TestReadTableSingleShardImmediate >> KqpBatchUpdate::SimpleOnePartition [GOOD] >> YdbSdkSessionsPool::Get1Session >> YdbSdkSessionsPool::PeriodicTask10 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] Test command err: 2025-04-17T17:13:15.686070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:13:15.686622Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:13:15.686676Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00244f/r3tmp/tmpk4yHd1/pdisk_1.dat 2025-04-17T17:13:16.121854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:13:16.182468Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:16.229763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:16.229886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:16.241410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:13:16.339917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:13:16.436570Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:13:16.437620Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:13:16.438048Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:13:16.438308Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:16.624566Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:13:16.626425Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:16.626526Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:13:16.628148Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:13:16.628229Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:13:16.628302Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:13:16.636917Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:13:16.637123Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:13:16.637213Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:13:16.648500Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:13:16.715542Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:13:16.715734Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:13:16.715872Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:13:16.715910Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:13:16.715947Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:13:16.716008Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:13:16.724352Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:16.724423Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:16.724712Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:13:16.724819Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:13:16.724923Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:13:16.724965Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:13:16.724998Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:13:16.725027Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:13:16.725056Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:13:16.725088Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:13:16.725124Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:13:16.725256Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:16.725292Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:16.725346Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:13:16.725697Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:13:16.725735Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:13:16.725835Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:13:16.726048Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:13:16.726115Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:13:16.726203Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:13:16.726242Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:13:16.726277Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:13:16.726323Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:13:16.726355Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:13:16.726640Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:13:16.726684Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:13:16.726720Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:13:16.726750Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:13:16.726797Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:13:16.726839Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:13:16.726887Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:13:16.726921Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:13:16.726943Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:13:16.729144Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:13:16.729191Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:13:16.747555Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:13:16.747626Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:13:16.747660Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:13:16.747722Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:13:16.747791Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:13:16.918142Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:16.918201Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:16.918241Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:13:16.918669Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:13:16.918711Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:13:16.918815Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:13:16.918868Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:13:16.918932Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:13:16.918970Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:13:16.923098Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:13:16.923161Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:13:16.924024Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:16.924066Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:16.929367Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:13:1 ... de 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:13:19.070963Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:666:2570]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 1 Status: STATUS_NOT_FOUND 2025-04-17T17:13:19.071056Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:755:2634]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 1 Status: STATUS_NOT_FOUND 2025-04-17T17:13:19.082278Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-17T17:13:19.082398Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:666:2570], Recipient [1:755:2634]: {TEvReadSet step# 3022 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-17T17:13:19.082440Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:19.082484Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715664 2025-04-17T17:13:19.082568Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-17T17:13:19.082627Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:755:2634], Recipient [1:666:2570]: {TEvReadSet step# 3022 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-04-17T17:13:19.082643Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:19.082658Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 ... performing the first select 2025-04-17T17:13:19.754467Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2bv77yb7hz2dah7y6a866j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDA3NWJkYTQtYWE3YzI1YjAtZWI0NTI4ZDEtOTc4OTkzZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:13:19.760743Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1105:2876], Recipient [1:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2025-04-17T17:13:19.761002Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:13:19.761070Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-04-17T17:13:19.761136Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T17:13:19.761170Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-04-17T17:13:19.761249Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:13:19.761280Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:13:19.761320Z node 1 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-04-17T17:13:19.761372Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T17:13:19.761396Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:13:19.761420Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T17:13:19.761440Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:13:19.761549Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-17T17:13:19.761776Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T17:13:19.761839Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4000/18446744073709551615 2025-04-17T17:13:19.761891Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[1:1105:2876], 0} after executionsCount# 1 2025-04-17T17:13:19.761937Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1105:2876], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:13:19.762007Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1105:2876], 0} finished in read 2025-04-17T17:13:19.762087Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T17:13:19.762117Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:13:19.762139Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:13:19.762166Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:13:19.762201Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T17:13:19.762234Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:13:19.762262Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-04-17T17:13:19.762295Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T17:13:19.762379Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-17T17:13:19.762660Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:666:2570]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 1 Status: STATUS_SUBSCRIBED 2025-04-17T17:13:19.762799Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1107:2877], Recipient [1:755:2634]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2025-04-17T17:13:19.762909Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-17T17:13:19.762956Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2025-04-17T17:13:19.763000Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-04-17T17:13:19.763022Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2025-04-17T17:13:19.763045Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-17T17:13:19.763067Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-17T17:13:19.763096Z node 1 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037889 2025-04-17T17:13:19.763122Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-04-17T17:13:19.763164Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-17T17:13:19.763200Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2025-04-17T17:13:19.763234Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2025-04-17T17:13:19.763319Z node 1 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-17T17:13:19.763539Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 3] 2025-04-17T17:13:19.763601Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to 
v4000/18446744073709551615 2025-04-17T17:13:19.763632Z node 1 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[1:1107:2877], 0} after executionsCount# 1 2025-04-17T17:13:19.763664Z node 1 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[1:1107:2877], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:13:19.763715Z node 1 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[1:1107:2877], 0} finished in read 2025-04-17T17:13:19.763762Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-04-17T17:13:19.763783Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2025-04-17T17:13:19.763805Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2025-04-17T17:13:19.763825Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2025-04-17T17:13:19.763867Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-04-17T17:13:19.763890Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2025-04-17T17:13:19.763919Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037889 has finished 2025-04-17T17:13:19.763960Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-17T17:13:19.764018Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-17T17:13:19.764242Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:755:2634]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 1 Status: STATUS_SUBSCRIBED 2025-04-17T17:13:19.765210Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1105:2876], Recipient [1:666:2570]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T17:13:19.765268Z node 1 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-17T17:13:19.765951Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1107:2877], Recipient [1:755:2634]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T17:13:19.766009Z node 1 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } >> SystemView::ShowCreateTableDefaultLiteral [GOOD] >> SystemView::ShowCreateTablePartitionAtKeys >> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleRanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] Test command err: 2025-04-17T17:13:11.691375Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:13:11.711080Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:13:11.711509Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T17:13:11.711740Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:11.782486Z node 1 
:TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:13:11.793247Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:11.944479Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:13:11.946003Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T17:13:11.946079Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T17:13:11.946175Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T17:13:11.947461Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:13:11.947938Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:13:11.948002Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T17:13:11.948379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:13:11.948416Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:12.061915Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:13:12.130753Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T17:13:12.130953Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:13:12.131067Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T17:13:12.131102Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T17:13:12.131135Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T17:13:12.131171Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:13:12.131320Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:12.131383Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:12.131650Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T17:13:12.131751Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T17:13:12.131868Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:13:12.131903Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:13:12.131952Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T17:13:12.131990Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T17:13:12.132037Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T17:13:12.132066Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T17:13:12.132195Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:13:12.132293Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:12.132325Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:12.132376Z node 1 :TX_DATASHARD 
DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T17:13:12.134767Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T17:13:12.134838Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:13:12.134916Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:13:12.135034Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T17:13:12.135073Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T17:13:12.135146Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T17:13:12.135189Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:13:12.135225Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T17:13:12.135279Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T17:13:12.135315Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:13:12.135621Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T17:13:12.135656Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T17:13:12.135685Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T17:13:12.135713Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:13:12.135755Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T17:13:12.135786Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T17:13:12.135822Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T17:13:12.135851Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T17:13:12.135872Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T17:13:12.149110Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T17:13:12.149187Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:13:12.149222Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:13:12.149260Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T17:13:12.149314Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T17:13:12.149868Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 
2025-04-17T17:13:12.149920Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:12.149974Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T17:13:12.150065Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T17:13:12.150095Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:13:12.150220Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T17:13:12.150267Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:13:12.150317Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T17:13:12.150350Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T17:13:12.157127Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T17:13:12.157204Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:13:12.157495Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:12.157547Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:12.157604Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:13:12.157642Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:13:12.157701Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T17:13:12.157743Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T17:13:12.157797Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T17:13:12.157844Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:13:12.157874Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T17:13:12.157921Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T17:13:12.157954Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T17:13:12.158120Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T17:13:12.158154Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:13:12.158176Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T17:13:12.158196Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T17:13:12.158215Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T17:13:12.158282Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 
2025-04-17T17:13:12.158311Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T17:13:12.158342Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T17:13:12.158388Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T17:13:12.158429Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T17:13:12.158475Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T17:13:12.158519Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T17:13:12.158560Z node 1 :TX_DATA ... SHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-04-17T17:13:20.331096Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T17:13:20.331121Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-17T17:13:20.331137Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T17:13:20.331210Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T17:13:20.331226Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-04-17T17:13:20.331244Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T17:13:20.331265Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-17T17:13:20.331291Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T17:13:20.331344Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T17:13:20.331368Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-17T17:13:20.331389Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T17:13:20.331434Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-17T17:13:20.331450Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T17:13:20.331759Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-17T17:13:20.331795Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:20.331828Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-17T17:13:20.331925Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: 
{TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-17T17:13:20.331941Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:20.331954Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-17T17:13:20.332074Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-17T17:13:20.332120Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:20.332154Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-17T17:13:20.332227Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-17T17:13:20.332262Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:20.332292Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-17T17:13:20.332369Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-17T17:13:20.332390Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:20.332410Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-17T17:13:20.332458Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-17T17:13:20.332496Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:20.332518Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-17T17:13:20.332596Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-17T17:13:20.332616Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:20.332635Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-04-17T17:13:20.332716Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:13:20.332748Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2025-04-17T17:13:20.332806Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T17:13:20.332853Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 
SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-17T17:13:20.332885Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:13:20.333006Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-17T17:13:20.333028Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-17T17:13:20.333046Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:13:20.333065Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:150] at 9437184 on unit CompleteOperation 2025-04-17T17:13:20.333098Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 150] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T17:13:20.333122Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:13:20.333200Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:13:20.333220Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2025-04-17T17:13:20.333257Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T17:13:20.333315Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-17T17:13:20.333356Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:13:20.333451Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:13:20.333487Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2025-04-17T17:13:20.333524Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T17:13:20.333561Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-17T17:13:20.333582Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:13:20.333663Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:13:20.333682Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:153] at 9437184 on unit CompleteOperation 2025-04-17T17:13:20.333709Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 153] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T17:13:20.333730Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:13:20.333801Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:13:20.333834Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2025-04-17T17:13:20.333886Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T17:13:20.333925Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 
2025-04-17T17:13:20.333957Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:13:20.334119Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-17T17:13:20.334149Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:20.334175Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-04-17T17:13:20.334345Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-17T17:13:20.334372Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:20.334395Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-04-17T17:13:20.334454Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-17T17:13:20.334476Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:20.334498Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-04-17T17:13:20.334589Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-17T17:13:20.334610Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:20.334627Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 >> SystemView::Describe [GOOD] >> SystemView::DescribeSystemFolder >> DataShardReadIterator::ShouldReadRangeArrow [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec |75.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |75.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |75.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf >> DataShardReadIteratorBatchMode::ShouldHandleReadAck [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace >> YdbSdkSessionsPool::WaitQueue10 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::SimpleOnePartition [GOOD] Test command err: Trying to start YDB, gRPC: 63711, MsgBus: 28450 2025-04-17T17:11:34.106839Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494330929553416251:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:34.107244Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0023c3/r3tmp/tmpROCmIy/pdisk_1.dat 2025-04-17T17:11:34.683655Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:34.683752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:34.687364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:11:34.687817Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63711, node 1 2025-04-17T17:11:34.799851Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:34.799874Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:34.799881Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:34.800004Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28450 TClient is connected to server localhost:28450 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:11:38.641952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:38.678546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:39.107031Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494330929553416251:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:39.107092Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:11:39.281296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:39.654090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:39.989211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:11:43.249974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330968208123574:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:43.250103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:43.629761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:11:43.704547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:11:43.747349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:11:43.836077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:11:43.960409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:11:44.079383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:11:44.290456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:11:44.377101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330972503091556:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:44.377161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:44.377482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330972503091561:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:44.404634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:11:44.475413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494330972503091563:2482], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:11:44.581051Z node 1 :TX_PROXY ERROR: Actor# [1:7494330972503091625:3586] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:11:49.689131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:49.689390Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 64051, MsgBus: 14539 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0023c3/r3tmp/tmptElqik/pdisk_1.dat 2025-04-17T17:12:04.145889Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494331062332563116:2219];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:04.206434Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:12:04.286446Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:12:04.286528Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:12:04.298037Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:04.298979Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64051, node 2 2025-04-17T17:12:04.472731Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:12:04.472760Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:12:04.472766Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:12:04.472880Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14539 TClient is connected to server localhost:14539 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:12:05.860927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:12:05.904293Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:12:05.935589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:12:06.402377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting.. ... atus: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:49.794754Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:12:49.848746Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:12:49.924091Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:12:49.967237Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:12:50.042120Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:12:50.082862Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:12:50.126821Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:12:50.207265Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331257173713837:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:50.207348Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:50.207535Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331257173713842:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:50.217108Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:12:50.231181Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494331257173713844:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:12:50.294602Z node 3 :TX_PROXY ERROR: Actor# [3:7494331257173713900:3557] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:12:57.836586Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:12:57.836613Z node 3 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 61078, MsgBus: 3736 2025-04-17T17:13:03.589418Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494331312143815573:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:03.754008Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0023c3/r3tmp/tmpd0Gdsv/pdisk_1.dat 2025-04-17T17:13:04.391672Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:04.653845Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:04.653938Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:04.693133Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61078, node 4 2025-04-17T17:13:04.960684Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:13:04.960712Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:13:04.960720Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:13:04.960890Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3736 TClient is connected to server localhost:3736 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:13:06.388994Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:13:06.535640Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:07.214480Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:08.180435Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494331312143815573:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:08.180497Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:13:08.185347Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:08.389583Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:10.548290Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494331342208588270:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:10.548393Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:10.593676Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:13:10.688404Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:13:10.731270Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:13:10.814375Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:13:10.874462Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:13:11.000161Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:13:11.169429Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:13:11.452093Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494331346503556254:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:11.452176Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:11.452392Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494331346503556259:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:11.456871Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:13:11.470094Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494331346503556261:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:13:11.524725Z node 4 :TX_PROXY ERROR: Actor# [4:7494331346503556319:3577] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:13:19.092746Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:13:19.092772Z node 4 :IMPORT WARN: Table profiles were not loaded >> TopicAutoscaling::ControlPlane_BackCompatibility [GOOD] >> TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention >> YdbSdkSessionsPool::StressTestSync1 >> YdbSdkSessionsPool::StressTestAsync1 >> YdbSdkSessionsPool::WaitQueue1 [GOOD] >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] |75.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |75.7%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |75.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::WaitQueue1 [GOOD] Test command err: 2025-04-17T17:13:18.157472Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331377325805446:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:18.157515Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00153a/r3tmp/tmpeWQkTv/pdisk_1.dat 2025-04-17T17:13:19.209266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:13:19.294012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:19.294108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:19.298946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:13:19.312546Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32489, node 1 2025-04-17T17:13:19.376932Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:13:19.432684Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:13:19.572795Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:13:19.572824Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:13:19.572832Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:13:19.572962Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32359 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:13:19.963567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:23.160264Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331377325805446:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:23.160332Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |75.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |75.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |75.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite [GOOD] >> DataShardReadIterator::ShouldReadKeyOnlyValueColumn [GOOD] >> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite >> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98 >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder [GOOD] >> SystemView::TopPartitionsByCpuRanges [GOOD] >> SystemView::TopPartitionsByCpuFollowers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] Test command err: 2025-04-17T17:13:13.461258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:13:13.461861Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:13:13.461916Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002464/r3tmp/tmpKJPlpM/pdisk_1.dat 2025-04-17T17:13:14.223946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:13:14.278024Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:14.329678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:14.329787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:14.341229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:13:14.432654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:13:14.979658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:13:15.293854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:926:2736], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:15.293966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:936:2741], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:15.294059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:15.299197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:13:15.495267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:940:2744], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:13:15.614069Z node 1 :TX_PROXY ERROR: Actor# [1:1001:2786] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:13:15.985176Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2bv38vem0tn046zx5rc4rm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmZlYjRiNjUtMmYyY2Q3NDAtYzhjN2IwZS05ZmI4MzdiZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:13:16.060922Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2bv3za6a7aed03cjc3bp9y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUzNTNlYS1lNjExZTdhZC1hYmQ3M2Q4Yi1kMGFlNDY0Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:13:16.455192Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2bv41z8qkya4msb7avbv3e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjJjMDI2ZGEtOWU3NzIwNWYtMzc1ZmE0OGUtZmRlNjYwNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ... waiting for commit read sets 2025-04-17T17:13:16.548737Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2bv4dt10bs8w1xmaj6fj1r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjJjMDI2ZGEtOWU3NzIwNWYtMzc1ZmE0OGUtZmRlNjYwNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... sending immediate upsert ... waiting for immediate propose 2025-04-17T17:13:16.636142Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2bv4gvbt61jynd8cft7yka, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjljNmU0MjAtZTNhZTdlN2YtNmUxYjI3NjAtMTgxMDk0OWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... immediate upsert is blocked 2025-04-17T17:13:16.640429Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting;tx_id=281474976715666; 2025-04-17T17:13:16.647523Z node 1 :KQP_COMPUTE WARN: SelfId: [1:1198:2840], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [1:1079:2840]Got OVERLOADED for table `/Root/table-1`. ShardID=72075186224037889, Sink=[1:1198:2840]. Ignored this error.{
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 } 2025-04-17T17:13:16.648100Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:1191:2840], SessionActorId: [1:1079:2840], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`., code: 2006
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 . sessionActorId=[1:1079:2840]. isRollback=0 2025-04-17T17:13:16.650467Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjljNmU0MjAtZTNhZTdlN2YtNmUxYjI3NjAtMTgxMDk0OWE=, ActorId: [1:1079:2840], ActorState: ExecuteState, TraceId: 01js2bv4gvbt61jynd8cft7yka, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:1192:2840] from: [1:1191:2840] 2025-04-17T17:13:16.650848Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:1192:2840] TxId: 281474976715665. Ctx: { TraceId: 01js2bv4gvbt61jynd8cft7yka, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjljNmU0MjAtZTNhZTdlN2YtNmUxYjI3NjAtMTgxMDk0OWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`., code: 2006 subissue: {
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 } } 2025-04-17T17:13:16.651268Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjljNmU0MjAtZTNhZTdlN2YtNmUxYjI3NjAtMTgxMDk0OWE=, ActorId: [1:1079:2840], ActorState: ExecuteState, TraceId: 01js2bv4gvbt61jynd8cft7yka, Create QueryResponse for error on request, msg: 2025-04-17T17:13:16.651744Z node 1 :KQP_COMPUTE WARN: SelfId: [1:1164:2842], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [1:1081:2842]TEvDeliveryProblem was received from tablet: 72075186224037889 2025-04-17T17:13:16.651838Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:1154:2842], SessionActorId: [1:1081:2842], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889., code: 2026 . sessionActorId=[1:1081:2842]. isRollback=0 2025-04-17T17:13:16.653665Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjJjMDI2ZGEtOWU3NzIwNWYtMzc1ZmE0OGUtZmRlNjYwNWQ=, ActorId: [1:1081:2842], ActorState: ExecuteState, TraceId: 01js2bv4dt10bs8w1xmaj6fj1r, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [1:1155:2842] from: [1:1154:2842] 2025-04-17T17:13:16.654063Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:1155:2842] TxId: 281474976715664. Ctx: { TraceId: 01js2bv4dt10bs8w1xmaj6fj1r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjJjMDI2ZGEtOWU3NzIwNWYtMzc1ZmE0OGUtZmRlNjYwNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNDETERMINED: {
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889., code: 2026 } 2025-04-17T17:13:16.654510Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjJjMDI2ZGEtOWU3NzIwNWYtMzc1ZmE0OGUtZmRlNjYwNWQ=, ActorId: [1:1081:2842], ActorState: ExecuteState, TraceId: 01js2bv4dt10bs8w1xmaj6fj1r, Create QueryResponse for error on request, msg: 2025-04-17T17:13:17.161669Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01js2bv4s328mm22mrpwc0mnmz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTg0ZmY0NTktNWUxYTdkZDQtZDVmNDcwMjQtZmFjNjEwNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } } 2025-04-17T17:13:22.376718Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:13:22.377044Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:13:22.377250Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002464/r3tmp/tmpLEUeOZ/pdisk_1.dat 2025-04-17T17:13:22.979558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:13:23.011609Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:23.053905Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:23.054053Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:23.069237Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:13:23.171357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:13:23.543020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:13:23.900063Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:832:2684], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:23.900965Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:842:2689], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:23.901054Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:23.906139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:13:24.102937Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:846:2692], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:13:24.143546Z node 2 :TX_PROXY ERROR: Actor# [2:906:2733] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:13:24.243151Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2bvbnt82c24b5vqbxxa8r2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2YzZjNmY2YtNjkxYjQwN2YtZjg3NzRjZWMtNjAxMDY2ZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:13:24.376547Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2bvc1dcwkxc0kfs9zyyj8n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzgwNWU5ZmQtZjQ3ZWQ2MzQtM2QwYjQwY2QtNDIzMDZmYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for readsets 2025-04-17T17:13:25.130426Z node 2 :KQP_COMPUTE WARN: TxId: 281474976715664, task: 1, CA Id [2:1019:2811]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2025-04-17T17:13:25.130750Z node 2 :KQP_COMPUTE WARN: SelfId: [2:992:2772], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [2:968:2772]TEvDeliveryProblem was received from tablet: 72075186224037888 2025-04-17T17:13:25.130857Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:982:2772], SessionActorId: [2:968:2772], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888., code: 2026 . sessionActorId=[2:968:2772]. isRollback=0 2025-04-17T17:13:25.131074Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODliZjZkMjgtY2RmMzg0YmItNmIxY2U3MDMtOWMzNzAwZWU=, ActorId: [2:968:2772], ActorState: ExecuteState, TraceId: 01js2bvc5bdxf7qbhj9d00hsb2, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [2:983:2772] from: [2:982:2772] 2025-04-17T17:13:25.131599Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:983:2772] TxId: 281474976715663. Ctx: { TraceId: 01js2bvc5bdxf7qbhj9d00hsb2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODliZjZkMjgtY2RmMzg0YmItNmIxY2U3MDMtOWMzNzAwZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNDETERMINED: {
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888., code: 2026 } 2025-04-17T17:13:25.137468Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODliZjZkMjgtY2RmMzg0YmItNmIxY2U3MDMtOWMzNzAwZWU=, ActorId: [2:968:2772], ActorState: ExecuteState, TraceId: 01js2bvc5bdxf7qbhj9d00hsb2, Create QueryResponse for error on request, msg: { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } >> YdbSdkSessionsPool::CustomPlan [GOOD] >> YdbSdkSessionsPool::FailTest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] Test command err: 2025-04-17T17:13:13.638336Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:13:13.673809Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:13:13.674267Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T17:13:13.674505Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:13.764405Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:13:13.785612Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:13.973076Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:13:13.974634Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T17:13:13.974705Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T17:13:13.974796Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T17:13:13.975110Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:13:13.975543Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:13:13.975605Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T17:13:13.975935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:13:13.975968Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:14.094924Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:13:14.151973Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T17:13:14.153516Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:13:14.153692Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T17:13:14.153750Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T17:13:14.153806Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T17:13:14.153877Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:13:14.154096Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:14.154163Z node 1 :TX_DATASHARD TRACE: 
StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:14.154546Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T17:13:14.154651Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T17:13:14.154794Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:13:14.154851Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:13:14.154918Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T17:13:14.154979Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T17:13:14.155060Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T17:13:14.155108Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T17:13:14.155158Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:13:14.155308Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:14.155344Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:14.155447Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T17:13:14.158205Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T17:13:14.158271Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:13:14.158369Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:13:14.158544Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T17:13:14.158606Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T17:13:14.158684Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T17:13:14.158768Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:13:14.158824Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T17:13:14.158885Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T17:13:14.158935Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:13:14.159332Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T17:13:14.159398Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T17:13:14.159440Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T17:13:14.159484Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:13:14.159530Z node 1 :TX_DATASHARD TRACE: Execution 
status for [0:1] at 9437184 is DelayComplete 2025-04-17T17:13:14.159566Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T17:13:14.159619Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T17:13:14.159654Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T17:13:14.159680Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T17:13:14.177212Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T17:13:14.177298Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:13:14.177335Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:13:14.177383Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T17:13:14.177454Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T17:13:14.178113Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:14.178164Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:14.178205Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T17:13:14.178300Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T17:13:14.178342Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:13:14.178478Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T17:13:14.178531Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:13:14.178584Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T17:13:14.178617Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T17:13:14.188603Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T17:13:14.188686Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:13:14.188909Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:14.188953Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:14.189003Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:13:14.189053Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:13:14.189116Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T17:13:14.189182Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 
2025-04-17T17:13:14.189216Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T17:13:14.189261Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:13:14.189293Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T17:13:14.189349Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T17:13:14.189404Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T17:13:14.189572Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T17:13:14.189613Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:13:14.189641Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T17:13:14.189666Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T17:13:14.189691Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T17:13:14.189750Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:13:14.189778Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T17:13:14.189815Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T17:13:14.189873Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T17:13:14.189919Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T17:13:14.189993Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T17:13:14.190024Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T17:13:14.190075Z node 1 :TX_DATA ... 
:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-17T17:13:26.206732Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:26.206751Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-04-17T17:13:26.206830Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:457:2399], Recipient [1:457:2399]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:26.206854Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:26.206888Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2025-04-17T17:13:26.206918Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2025-04-17T17:13:26.206950Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2025-04-17T17:13:26.206974Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2025-04-17T17:13:26.206999Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2025-04-17T17:13:26.207024Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2025-04-17T17:13:26.207064Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2025-04-17T17:13:26.207103Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2025-04-17T17:13:26.207562Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2025-04-17T17:13:26.207606Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T17:13:26.207653Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2025-04-17T17:13:26.207676Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2025-04-17T17:13:26.207698Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2025-04-17T17:13:26.207720Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2025-04-17T17:13:26.207887Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is DelayComplete 2025-04-17T17:13:26.207926Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2025-04-17T17:13:26.207953Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2025-04-17T17:13:26.207979Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2025-04-17T17:13:26.208005Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2025-04-17T17:13:26.208036Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2025-04-17T17:13:26.208062Z node 1 :TX_DATASHARD TRACE: Execution plan for 
[1000005:152] at 9437186 has finished 2025-04-17T17:13:26.208086Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:13:26.208122Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-04-17T17:13:26.208147Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2025-04-17T17:13:26.208169Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-04-17T17:13:26.208365Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 113 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 37} 2025-04-17T17:13:26.208396Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:26.208421Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 113 2025-04-17T17:13:26.208547Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-17T17:13:26.208590Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:26.208620Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-17T17:13:26.208718Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-17T17:13:26.208759Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:26.208782Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-17T17:13:26.208875Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-17T17:13:26.208905Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:26.208927Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-17T17:13:26.209008Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-04-17T17:13:26.209031Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:26.209051Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-17T17:13:26.209116Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-17T17:13:26.209144Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:26.209167Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 
9437186 consumer 9437186 txId 128 2025-04-17T17:13:26.209238Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-17T17:13:26.209270Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:26.209290Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-17T17:13:26.209371Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-17T17:13:26.209397Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:26.209419Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-17T17:13:26.209490Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-17T17:13:26.209518Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:26.209539Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-17T17:13:26.209610Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-17T17:13:26.209638Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:26.209665Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-17T17:13:26.209748Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-17T17:13:26.209776Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:26.209796Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-17T17:13:26.209872Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-17T17:13:26.209907Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:26.209932Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-17T17:13:26.210005Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-17T17:13:26.210026Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:26.210045Z node 1 :TX_DATASHARD 
DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-17T17:13:26.234379Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T17:13:26.234438Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-17T17:13:26.234501Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T17:13:26.234569Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-17T17:13:26.234603Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T17:13:26.234843Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-17T17:13:26.234876Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:13:26.234905Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 |75.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |75.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |75.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut >> SystemView::ShowCreateTablePartitionSettings [FAIL] >> SystemView::ShowCreateTableReadReplicas >> ResourcePoolsDdl::TestAlterResourcePool [GOOD] >> ResourcePoolsDdl::TestPoolSwitchToLimitedState ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite [GOOD] Test command err: 2025-04-17T17:13:14.311103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:13:14.311589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:13:14.311642Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00246c/r3tmp/tmpy6tuRl/pdisk_1.dat 2025-04-17T17:13:14.787623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:13:14.881802Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:14.933611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:14.933721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:14.945113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:13:15.051806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:13:15.443684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:13:15.738706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:829:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:15.738815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:838:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:15.738885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:15.743932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:13:15.919529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:843:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:13:16.008393Z node 1 :TX_PROXY ERROR: Actor# [1:903:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:13:16.401498Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2bv3pr3d34wad4yw1hd7px, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRmZTM2OTMtNmJjMzVhY2YtOTVlMzNiZDctZDgyOTJjOGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:13:16.469337Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2bv4c7224gfd4t66dses7q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWM0ZDg2NWMtNTk4ODY3MjQtMzUzM2Y0NDItOGRlZDdkZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 2025-04-17T17:13:17.419046Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2bv56z9azs18gq1dgg8y9p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OThkMjhhZGMtY2NmODA0MTktNTM1Y2IyYzAtZDMzMzZmMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:13:17.571246Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2bv5bt272rjyr5rn6abesb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDEzM2ZhMTMtM2I2OTA2YjMtODJmY2YwNmEtMjdiYTdhZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... shards are ready for read-only immediate transactions ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 2025-04-17T17:13:22.903810Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:13:22.904183Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:13:22.904424Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00246c/r3tmp/tmpcIvexI/pdisk_1.dat 2025-04-17T17:13:23.366390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:13:23.411114Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:23.453599Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:23.453730Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:23.465265Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:13:23.557397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:13:23.918968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:13:24.253774Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:24.253898Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:841:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:24.253988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:24.277247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:13:24.480086Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:13:24.528064Z node 2 :TX_PROXY ERROR: Actor# [2:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:13:24.650102Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2bvc0we08azaj4jfpeytz9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzZmYTY2Y2ItNzk0ZjQ4YWQtYThlNjllNzgtMmI3ODhhZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:13:24.729520Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2bvce11rh5m9smab54mfkc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTUwNDBhNDEtNDIyMzk0ZjMtMjdjMjBkNzYtODRiNzZmYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:13:25.464785Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2bvcpz7g1xrb6y5x7z519y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2FkMTUyMDQtOGJjY2IzNGMtZDc0ZDdkNDAtYjZhNWRjZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-04-17T17:13:25.899672Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2bvdh71cferx4g57yrva76, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzEyZDRmOTYtNTU4NTMyYWUtYmI4MjRkY2EtODY1MzFjMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:13:26.031200Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2bvdmz3gftma9a5p36dv1f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2FkMTUyMDQtOGJjY2IzNGMtZDc0ZDdkNDAtYjZhNWRjZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:13:26.193465Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2bvdrk08qh5w9rg2r2a33q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2FkMTUyMDQtOGJjY2IzNGMtZDc0ZDdkNDAtYjZhNWRjZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:13:26.267549Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2FkMTUyMDQtOGJjY2IzNGMtZDc0ZDdkNDAtYjZhNWRjZWM=, ActorId: [2:975:2779], ActorState: ExecuteState, TraceId: 01js2bvdxt2sp5w7s39rmrzr96, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] |75.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |75.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |75.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder [GOOD] Test command err: 2025-04-17T17:13:13.038948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:13:13.039534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:13:13.039587Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002457/r3tmp/tmpF9m0mN/pdisk_1.dat 2025-04-17T17:13:14.003313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:13:14.046160Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:14.085732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:14.085856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:14.101203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:13:14.187384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:13:14.544029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:13:14.890816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:829:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:14.890983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:838:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:14.891155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:14.911255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:13:15.133615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:843:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:13:15.293547Z node 1 :TX_PROXY ERROR: Actor# [1:903:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:13:15.794915Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2bv2w22g2bh6155vn3ckwn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWYzYWU0ZGUtZjJkYmJjZDMtYjhkNGNkNzUtM2I3OGJjNWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:13:15.906596Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2bv3sjazzddjxm15b48yv2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzdlNzZlZDQtM2VjZjU5Y2QtNjZjZDY0NDktYTlmYTE5MDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:13:16.569175Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2bv43qdsjpz86vma6d45gz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzQyNDI5NzMtYTQ2YjI3YTItOTE5NGZiYTMtNzkwZGQ4OGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-04-17T17:13:16.986877Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2bv4v7fsqt4n809ecsbt7a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmFmYzMwZi04YWQwZDY4Mi0yZGQxMjVjYS0zMWY2MGNiNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:13:17.195681Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2bv4yf1f1drcd10qp0j4ax, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzQyNDI5NzMtYTQ2YjI3YTItOTE5NGZiYTMtNzkwZGQ4OGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:13:17.475169Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2bv56q2zvm2gmbnc1ysk0z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzQyNDI5NzMtYTQ2YjI3YTItOTE5NGZiYTMtNzkwZGQ4OGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:13:17.610097Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzQyNDI5NzMtYTQ2YjI3YTItOTE5NGZiYTMtNzkwZGQ4OGE=, ActorId: [1:966:2781], ActorState: ExecuteState, TraceId: 01js2bv5dk862wt2b2mn6refph, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-04-17T17:13:17.625104Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01js2bv5dk862wt2b2mn6refph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzQyNDI5NzMtYTQ2YjI3YTItOTE5NGZiYTMtNzkwZGQ4OGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:13:21.905219Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:13:21.905487Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:13:21.905631Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002457/r3tmp/tmpIeESKH/pdisk_1.dat 2025-04-17T17:13:22.392490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:13:22.457758Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:22.509130Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:22.509266Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:22.521445Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:13:22.708521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:13:23.095342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:13:23.530058Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:23.530176Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:841:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:23.530245Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:23.544258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:13:23.750272Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:13:23.795545Z node 2 :TX_PROXY ERROR: Actor# [2:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:13:23.920959Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2bvba8bbgvrhwswxdwv5vv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDIyMDU0YzctOGMzZThmNzktNGIxYTNiN2UtN2VmOWQ3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:13:24.102694Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2bvbqxcg7j6m4b9bj2ey1x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2JjMGY5MDEtMmQ4NzdjMzMtNDFmM2RhMzktNThjMmI2ODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the first select 2025-04-17T17:13:24.938632Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2bvc403j2fpqk68s4v2ty8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjQwN2QxLTMzMjljMjY1LTdmZWEwNTA3LWEzMDM2YjY4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-04-17T17:13:25.575297Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2bvcq5b09yac0kv9r7ezcg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODFlN2ViODQtOTVjZDA5ZDctNjRiMGUyNC1hYTg1MzJhNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } ... waiting for commit read sets 2025-04-17T17:13:25.679595Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2bvdbb2apkmb2eza6xtr22, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODFlN2ViODQtOTVjZDA5ZDctNjRiMGUyNC1hYTg1MzJhNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ... performing an upsert 2025-04-17T17:13:26.140935Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2bvdrwevyxfbxma0xkvat4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjFhYmE4MDMtODQ1MjYzYzQtNTQ2ODc3NjktY2JmZGUyY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the second select 2025-04-17T17:13:26.260181Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01js2bvdwc8t5ga2aw699ya6bt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjQwN2QxLTMzMjljMjY1LTdmZWEwNTA3LWEzMDM2YjY4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the third select 2025-04-17T17:13:26.351919Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01js2bvdzq9tf0cvcp3b8r614r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjQwN2QxLTMzMjljMjY1LTdmZWEwNTA3LWEzMDM2YjY4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... 
performing the last upsert and commit 2025-04-17T17:13:26.457103Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjQwN2QxLTMzMjljMjY1LTdmZWEwNTA3LWEzMDM2YjY4, ActorId: [2:975:2779], ActorState: ExecuteState, TraceId: 01js2bve2k6deftbq2zma9wftb, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken >> YdbSdkSessionsPool::Get1Session [GOOD] >> YdbSdkSessionsPool::PeriodicTask1 >> SystemView::AuthUsers_LockUnlock [GOOD] >> SystemView::AuthUsers_Access >> SystemView::PgTablesOneSchemeShardDataQuery [GOOD] >> TopicAutoscaling::Simple_PQv1 [GOOD] >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad >> SystemView::QueryStats >> DataShardReadIterator::ShouldReverseReadMultipleRanges [GOOD] >> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] Test command err: 2025-04-17T17:13:15.469836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:13:15.470394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:13:15.470455Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002459/r3tmp/tmptd1OtM/pdisk_1.dat 2025-04-17T17:13:15.926862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:13:15.975786Z node 1 :KQP_RESOURCE_MANAGER INFO: Updated table service config: ComputeActorsCount: 10000 ChannelBufferSize: 8388608 MkqlLightProgramMemoryLimit: 1048576 MkqlHeavyProgramMemoryLimit: 31457280 QueryMemoryLimit: 32212254720 PublishStatisticsIntervalSec: 2 MaxTotalChannelBuffersSize: 2147483648 MinChannelBufferSize: 2048 2025-04-17T17:13:15.975902Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-17T17:13:15.975964Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 7 2025-04-17T17:13:15.976084Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Updated table service config. 2025-04-17T17:13:15.976290Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:16.020416Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:13:16.021295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:16.021412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:16.021671Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:13:16.035673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:13:16.125165Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T17:13:16.125227Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T17:13:16.125390Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:642:2550] 2025-04-17T17:13:16.258363Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value1" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value2" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T17:13:16.258451Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:13:16.258994Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:13:16.259121Z node 1 :TX_PROXY 
DEBUG: Actor# [1:642:2550] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:13:16.259456Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:13:16.259632Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:13:16.259725Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T17:13:16.261520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:13:16.261992Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T17:13:16.262607Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T17:13:16.262688Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T17:13:16.298219Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:658:2565], Recipient [1:667:2571]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:13:16.299294Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:658:2565], Recipient [1:667:2571]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:13:16.299761Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:667:2571] 2025-04-17T17:13:16.299987Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:16.344577Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:658:2565], Recipient [1:667:2571]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:13:16.345342Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:16.345442Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:13:16.346902Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:13:16.346978Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:13:16.347029Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:13:16.347334Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:13:16.347462Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:13:16.347545Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:683:2571] in generation 1 2025-04-17T17:13:16.360405Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:13:16.405549Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:13:16.405722Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:13:16.405823Z node 1 
:TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:685:2581] 2025-04-17T17:13:16.405854Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:13:16.405882Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:13:16.405912Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:13:16.406120Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:667:2571], Recipient [1:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:16.406168Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:16.406392Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:13:16.406478Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:13:16.406604Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:13:16.406641Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:13:16.406679Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:13:16.406709Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:13:16.406752Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:13:16.406780Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:13:16.406823Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:13:16.407167Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:672:2573], Recipient [1:667:2571]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:16.407201Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:16.407246Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:672:2573], sessionId# [0:0:0] 2025-04-17T17:13:16.407301Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:672:2573] 2025-04-17T17:13:16.407335Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:13:16.407433Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:13:16.407615Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:13:16.407657Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:13:16.407821Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:13:16.407881Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:13:16.407928Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:13:16.407969Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:13:16.408019Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:13:16.408439Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:13:16.408513Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:13:16.408554Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:13:16.408656Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:13:16.408729Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:13:16.408768Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:13:16.408800Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:13:16.408827Z node 1 :TX_DATASH ... ::TEvSchemaChangedResult 2025-04-17T17:13:27.763610Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037888 state Ready 2025-04-17T17:13:27.763689Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 --- resending captured proposals --- waiting for result 2025-04-17T17:13:27.765565Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553206, Sender [2:890:2720], Recipient [2:667:2571]: NKikimrTxDataShard.TEvKqpScan TxId: 281474976715662 ScanId: 2 LocalPathId: 2 TablePath: "/Root/table-1" SchemaVersion: 1 ColumnTags: 3 ColumnTypes: 2 Ranges { From: "\001\000\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } Snapshot { Step: 2000 TxId: 281474976715661 } Generation: 1 ItemsLimit: 0 Reverse: false DataFormat: FORMAT_CELLVEC StatsMode: DQ_STATS_MODE_NONE ColumnTypeInfos { } LockNodeId: 0 2025-04-17T17:13:27.765703Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715662. Table '/Root/table-1' schema version changed at 72075186224037888 2025-04-17T17:13:27.765845Z node 2 :KQP_COMPUTE WARN: SelfId: [2:890:2720]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/table-1' scheme changed., code: 2028 , tablet id: 72075186224037888, actor_id: [2:667:2571] 2025-04-17T17:13:27.765895Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:890:2720]. Enqueue for resolve 72075186224037888 2025-04-17T17:13:27.765949Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:359;event=scanner_finished;tablet_id=72075186224037888;stop_shard=1; 2025-04-17T17:13:27.765996Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:93;event=stop_scanner;actor_id=NO_VALUE_OPTIONAL;message=;final_flag=1; 2025-04-17T17:13:27.766107Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:890:2720]. Sending TEvResolveKeySet update for table '/Root/table-1', range: [(Uint32 : NULL) ; ()), attempt #1 2025-04-17T17:13:27.766323Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:890:2720]. Received TEvResolveKeySetResult update for table '/Root/table-1' 2025-04-17T17:13:27.766378Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:890:2720]. Resolve request failed for table '/Root/table-1', ErrorCount# 1 2025-04-17T17:13:27.766478Z node 2 :KQP_COMPUTE DEBUG: kqp_scan_compute_actor.cpp:167 :TEvTerminateFromFetcher: [2:890:2720]/[2:888:2718] 2025-04-17T17:13:27.766561Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:888:2718], TxId: 281474976715662, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NGM5NDE3MmEtZTRhMTUzYS1iMzhjOWViNi0zMTg4Y2IwMw==. TraceId : 01js2bvdjj5z7akznpg2m7pg9t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. InternalError: SCHEME_ERROR KIKIMR_SCHEME_MISMATCH: {
: Error: Table '/Root/table-1' scheme changed., code: 2028 }. 2025-04-17T17:13:27.766734Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 1. pass away 2025-04-17T17:13:27.766824Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715662;task_id=1;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-04-17T17:13:27.769089Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715662, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-17T17:13:27.769242Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_fetcher_actor.cpp:99;event=TEvTerminateFromCompute;sender=[2:888:2718];info={
: Error: COMPUTE_STATE_FAILURE }; 2025-04-17T17:13:27.769313Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:289;event=abort_all_scanners;error_message=Send abort execution from compute actor, message: {
: Error: COMPUTE_STATE_FAILURE }; 2025-04-17T17:13:27.769496Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 1970-01-01T00:00:04.000000Z, after 1.550000s 2025-04-17T17:13:27.769733Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:883:2692] TxId: 281474976715662. Ctx: { TraceId: 01js2bvdjj5z7akznpg2m7pg9t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGM5NDE3MmEtZTRhMTUzYS1iMzhjOWViNi0zMTg4Y2IwMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:888:2718], task: 1, state: COMPUTE_STATE_FAILURE, stats: { CpuTimeUs: 306388 Tasks { TaskId: 1 CpuTimeUs: 303633 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 14 BuildCpuTimeUs: 303619 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-765mtltz7i" NodeId: 2 CreateTimeMs: 1744910006900 UpdateTimeMs: 1744910007766 } MaxMemoryUsage: 1048576 } 2025-04-17T17:13:27.769846Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715662. Ctx: { TraceId: 01js2bvdjj5z7akznpg2m7pg9t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGM5NDE3MmEtZTRhMTUzYS1iMzhjOWViNi0zMTg4Y2IwMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:888:2718] 2025-04-17T17:13:27.769942Z node 2 :KQP_EXECUTER INFO: ActorId: [2:883:2692] TxId: 281474976715662. Ctx: { TraceId: 01js2bvdjj5z7akznpg2m7pg9t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGM5NDE3MmEtZTRhMTUzYS1iMzhjOWViNi0zMTg4Y2IwMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2025-04-17T17:13:27.770011Z node 2 :KQP_EXECUTER INFO: ActorId: [2:883:2692] TxId: 281474976715662. Ctx: { TraceId: 01js2bvdjj5z7akznpg2m7pg9t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGM5NDE3MmEtZTRhMTUzYS1iMzhjOWViNi0zMTg4Y2IwMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. aborting compute actor execution, message: {
: Error: Terminate execution }, compute actor: [2:889:2719], task: 2 2025-04-17T17:13:27.770171Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:883:2692] TxId: 281474976715662. Ctx: { TraceId: 01js2bvdjj5z7akznpg2m7pg9t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGM5NDE3MmEtZTRhMTUzYS1iMzhjOWViNi0zMTg4Y2IwMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T17:13:27.770263Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:889:2719], TxId: 281474976715662, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=NGM5NDE3MmEtZTRhMTUzYS1iMzhjOWViNi0zMTg4Y2IwMw==. CustomerSuppliedId : . TraceId : 01js2bvdjj5z7akznpg2m7pg9t. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646735 2025-04-17T17:13:27.770340Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:889:2719], TxId: 281474976715662, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=NGM5NDE3MmEtZTRhMTUzYS1iMzhjOWViNi0zMTg4Y2IwMw==. CustomerSuppliedId : . TraceId : 01js2bvdjj5z7akznpg2m7pg9t. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Handle abort execution event from: [2:883:2692], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2025-04-17T17:13:27.770459Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 2. pass away 2025-04-17T17:13:27.770548Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715662;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-04-17T17:13:27.773532Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715662, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-17T17:13:27.773782Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGM5NDE3MmEtZTRhMTUzYS1iMzhjOWViNi0zMTg4Y2IwMw==, ActorId: [2:857:2692], ActorState: ExecuteState, TraceId: 01js2bvdjj5z7akznpg2m7pg9t, Create QueryResponse for error on request, msg: 2025-04-17T17:13:27.774232Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Handle TEvExecuteKqpTransaction 2025-04-17T17:13:27.774285Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] TxId# 281474976715664 ProcessProposeKqpTransaction 2025-04-17T17:13:27.774789Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2025-04-17T17:13:27.775017Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Handle TEvProposeTransaction 2025-04-17T17:13:27.775059Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] TxId# 0 ProcessProposeTransaction 2025-04-17T17:13:27.775154Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Cookie# 0 userReqId# "" txid# 0 reqId# [2:926:2751] SnapshotReq marker# P0 2025-04-17T17:13:27.775436Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715664. Resolved key sets: 0 2025-04-17T17:13:27.775637Z node 2 :TX_PROXY DEBUG: Actor# [2:928:2751] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2025-04-17T17:13:27.775780Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2bvdjj5z7akznpg2m7pg9t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGM5NDE3MmEtZTRhMTUzYS1iMzhjOWViNi0zMTg4Y2IwMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:13:27.775842Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715664. Ctx: { TraceId: 01js2bvdjj5z7akznpg2m7pg9t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGM5NDE3MmEtZTRhMTUzYS1iMzhjOWViNi0zMTg4Y2IwMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-17T17:13:27.775899Z node 2 :KQP_EXECUTER INFO: ActorId: [2:924:2692] TxId: 281474976715664. Ctx: { TraceId: 01js2bvdjj5z7akznpg2m7pg9t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGM5NDE3MmEtZTRhMTUzYS1iMzhjOWViNi0zMTg4Y2IwMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-17T17:13:27.776452Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:924:2692] TxId: 281474976715664. Ctx: { TraceId: 01js2bvdjj5z7akznpg2m7pg9t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGM5NDE3MmEtZTRhMTUzYS1iMzhjOWViNi0zMTg4Y2IwMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T17:13:27.776525Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:924:2692] TxId: 281474976715664. Ctx: { TraceId: 01js2bvdjj5z7akznpg2m7pg9t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGM5NDE3MmEtZTRhMTUzYS1iMzhjOWViNi0zMTg4Y2IwMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-17T17:13:27.776864Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:594:2519], selfId: [2:57:2104], source: [2:857:2692] 2025-04-17T17:13:27.776923Z node 2 :TX_PROXY DEBUG: Actor# [2:928:2751] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2025-04-17T17:13:27.777039Z node 2 :TX_PROXY DEBUG: Actor# [2:926:2751] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2025-04-17T17:13:27.777328Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:926:2751], Recipient [2:667:2571]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2025-04-17T17:13:27.784154Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NGM5NDE3MmEtZTRhMTUzYS1iMzhjOWViNi0zMTg4Y2IwMw==, workerId: [2:857:2692], local sessions count: 0 Response { QueryIssues { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 } QueryIssues { message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } TxMeta { } } YdbStatus: ABORTED ConsumedRu: 787 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] Test command err: 2025-04-17T17:13:16.914863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:13:16.915411Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:13:16.915472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00244a/r3tmp/tmpedQ0t2/pdisk_1.dat 2025-04-17T17:13:17.595863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:13:17.721134Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:17.765866Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:13:17.766762Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:13:17.766951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:17.767061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:17.781393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:13:17.875954Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T17:13:17.876033Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T17:13:17.881104Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T17:13:18.083886Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T17:13:18.083967Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:13:18.084560Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:13:18.084674Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:13:18.085006Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:13:18.085205Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:13:18.085291Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T17:13:18.086932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:13:18.087394Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T17:13:18.099527Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T17:13:18.099617Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T17:13:18.163163Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:13:18.164505Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:13:18.164970Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:672:2573] 2025-04-17T17:13:18.165205Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:18.189730Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:13:18.224569Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:13:18.225742Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:13:18.226016Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:674:2575] 2025-04-17T17:13:18.226151Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:18.233978Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:13:18.234392Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:18.234499Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:13:18.236313Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:13:18.236405Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:13:18.236475Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:13:18.236783Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:13:18.236928Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:13:18.237002Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:705:2573] in generation 1 2025-04-17T17:13:18.237291Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:18.237367Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:13:18.238286Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-17T17:13:18.238332Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 
72075186224037889 2025-04-17T17:13:18.238361Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-17T17:13:18.238556Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:13:18.238689Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:13:18.238727Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:706:2575] in generation 1 2025-04-17T17:13:18.249554Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:13:18.271570Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:13:18.271751Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:13:18.271877Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:709:2594] 2025-04-17T17:13:18.271912Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:13:18.271945Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:13:18.271979Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:13:18.272255Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:672:2573], Recipient [1:672:2573]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:18.272309Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:18.272374Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:13:18.272401Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-17T17:13:18.272441Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:13:18.272475Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:710:2595] 2025-04-17T17:13:18.272499Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-17T17:13:18.272523Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-17T17:13:18.272545Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:13:18.272727Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:674:2575], Recipient [1:674:2575]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:18.272745Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:18.272893Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:13:18.272955Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:13:18.273038Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:13:18.273068Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:13:18.273107Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:13:18.273132Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no 
attached operations 2025-04-17T17:13:18.273163Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:13:18.273187Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:13:18.273215Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:13:18.273264Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-17T17:13:18.273301Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-17T17:13:18.273678Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:690:2584], Recipient [1:672:2573]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:18.273705Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:18.273737Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:665:2569], serverId# [1:690:2584], sessionId# [0:0:0] 2025-04-17T17:13:18.273767Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 202 ... an for [0:2] at 72075186224037888 executing on unit CheckWrite 2025-04-17T17:13:27.996146Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:13:27.996194Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:13:27.996240Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:13:27.996299Z node 2 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037888 2025-04-17T17:13:27.996342Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-17T17:13:27.996369Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:13:27.996391Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit ExecuteWrite 2025-04-17T17:13:27.996412Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit ExecuteWrite 2025-04-17T17:13:27.996442Z node 2 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T17:13:27.996487Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:13:27.996610Z node 2 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=1 2025-04-17T17:13:27.996659Z node 2 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-17T17:13:27.996723Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:13:27.996751Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2025-04-17T17:13:27.996804Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2025-04-17T17:13:27.996850Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-04-17T17:13:27.996931Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is 
DelayCompleteNoMoreRestarts 2025-04-17T17:13:27.996961Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2025-04-17T17:13:27.996990Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:13:27.997017Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:13:27.997058Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-17T17:13:27.997077Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:13:27.997101Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037888 has finished 2025-04-17T17:13:28.007903Z node 2 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-04-17T17:13:28.007981Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-04-17T17:13:28.008082Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-04-17T17:13:28.008206Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:13:28.010158Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Handle TEvProposeTransaction 2025-04-17T17:13:28.010216Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] TxId# 281474976715661 ProcessProposeTransaction 2025-04-17T17:13:28.010408Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [2:861:2691] DataReq marker# P0 2025-04-17T17:13:28.010534Z node 2 :TX_PROXY DEBUG: Actor# [2:861:2691] Cookie# 0 txid# 281474976715661 HANDLE TDataReq marker# P1 2025-04-17T17:13:28.010769Z node 2 :TX_PROXY DEBUG: Actor# [2:861:2691] txid# 281474976715661 HANDLE EvNavigateKeySetResult TDataReq marker# P3b ErrorCount# 0 2025-04-17T17:13:28.010971Z node 2 :TX_PROXY DEBUG: Actor# [2:861:2691] txid# 281474976715661 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-04-17T17:13:28.011067Z node 2 :TX_PROXY DEBUG: Actor# [2:861:2691] txid# 281474976715661 SEND TEvProposeTransaction to datashard 72075186224037888 with read table request affected shards 1 followers disallowed marker# P4b 2025-04-17T17:13:28.011501Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:861:2691], Recipient [2:666:2570]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 861 RawX2: 8589937283 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t]\003\000\000\000\000\000\000\021\203\n\000\000\002\000\000\000" TxId: 281474976715661 ExecLevel: 0 Flags: 8 2025-04-17T17:13:28.038832Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:13:28.039018Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:13:28.039262Z node 2 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:2:0] 2025-04-17T17:13:28.039375Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit CheckDataTx 2025-04-17T17:13:28.039437Z node 2 :TX_DATASHARD TRACE: Execution status for 
[0:281474976715661] at 72075186224037888 is Executed 2025-04-17T17:13:28.039493Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CheckDataTx 2025-04-17T17:13:28.039533Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:13:28.039572Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:13:28.039620Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-04-17T17:13:28.039679Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715661] at 72075186224037888 2025-04-17T17:13:28.039717Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-17T17:13:28.039739Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:13:28.039786Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit MakeScanSnapshot 2025-04-17T17:13:28.039810Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit MakeScanSnapshot 2025-04-17T17:13:28.039837Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-17T17:13:28.039857Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit MakeScanSnapshot 2025-04-17T17:13:28.039878Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit WaitForStreamClearance 2025-04-17T17:13:28.039898Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2025-04-17T17:13:28.039950Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:861:2691] for [0:281474976715661] at 72075186224037888 2025-04-17T17:13:28.039981Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2025-04-17T17:13:28.040041Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:13:28.040173Z node 2 :TX_PROXY DEBUG: Got clearance request, shard: 72075186224037888, txid: 281474976715661 2025-04-17T17:13:28.040259Z node 2 :TX_PROXY DEBUG: Collected all clerance requests, txid: 281474976715661 2025-04-17T17:13:28.040299Z node 2 :TX_PROXY DEBUG: Send stream clearance, shard: 72075186224037888, txid: 281474976715661, cleared: 1 2025-04-17T17:13:28.040506Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:861:2691], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715661 2025-04-17T17:13:28.040551Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-04-17T17:13:28.040664Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:861:2691], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2025-04-17T17:13:28.040704Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-17T17:13:28.040789Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender 
[2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:28.040818Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:28.040903Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:13:28.040949Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:13:28.040994Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-17T17:13:28.041028Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2025-04-17T17:13:28.041068Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715661] at 72075186224037888 2025-04-17T17:13:28.041101Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-17T17:13:28.041156Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit WaitForStreamClearance 2025-04-17T17:13:28.041189Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit ReadTableScan 2025-04-17T17:13:28.041223Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit ReadTableScan 2025-04-17T17:13:28.041438Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2025-04-17T17:13:28.041467Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:13:28.041499Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:13:28.041532Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:13:28.041572Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:13:28.041670Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:13:28.042115Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:867:2696], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-17T17:13:28.042242Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor |75.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |75.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |75.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TExportToS3Tests::ShouldExcludeBackupTableFromStats [GOOD] >> TExportToS3Tests::ShouldCheckQuotas >> YdbSdkSessionsPool::WaitQueue10 [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |75.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/dynamic_config/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::WaitQueue10 [GOOD] Test command err: 2025-04-17T17:13:24.268959Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331401740826006:2218];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:24.269003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00137e/r3tmp/tmp7BTnFG/pdisk_1.dat 2025-04-17T17:13:25.399437Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:13:25.413852Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:25.435204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:25.435300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:25.444531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1304, node 1 2025-04-17T17:13:25.640754Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:13:25.640774Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:13:25.640780Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:13:25.640876Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65020 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:13:26.335363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:13:29.274122Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331401740826006:2218];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:29.274192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TopicAutoscaling::PartitionSplit_ManySession_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK >> YdbSdkSessionsPool::FailTest [GOOD] >> Balancing::Balancing_OneTopic_PQv1 [GOOD] >> Balancing::Balancing_ManyTopics_TopicApi >> KqpQueryPerf::Update+QueryService-UseSink >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PQv1 >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::FailTest [GOOD] Test command err: 2025-04-17T17:13:14.153628Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331362787115491:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:14.153699Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0015cb/r3tmp/tmpbvrr2w/pdisk_1.dat 2025-04-17T17:13:15.148408Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:15.175335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:15.175454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:15.177028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:13:15.197307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15576, node 1 2025-04-17T17:13:15.594055Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:13:15.594077Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:13:15.594082Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:13:15.594172Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5807 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:13:16.083138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:19.159041Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331362787115491:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:19.159151Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:13:27.916604Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494331415433864982:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:27.916730Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0015cb/r3tmp/tmptIisZS/pdisk_1.dat 2025-04-17T17:13:28.225067Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:28.260442Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:28.260548Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:28.264746Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20528, node 4 2025-04-17T17:13:28.365820Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:13:28.365840Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:13:28.365852Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:13:28.365980Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23618 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:13:28.647256Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... >> TPQTestSlow::TestOnDiskStoredSourceIds >> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn [GOOD] >> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99 >> KqpQueryPerf::MultiRead+QueryService >> Cdc::KeysOnlyLog[PqRunner] >> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_DistributedTxCommit |75.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |75.8%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |75.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage >> TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention [GOOD] >> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad >> TExportToS3Tests::ShouldCheckQuotas [GOOD] >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey >> SystemView::AuthGroups_TableRange [GOOD] >> SystemView::AuthOwners >> SystemView::TopPartitionsByTliFields [GOOD] >> ViewQuerySplit::Basic >> ViewQuerySplit::Basic [GOOD] >> ViewQuerySplit::WithPragmaTablePathPrefix [GOOD] >> ViewQuerySplit::WithPairedPragmaTablePathPrefix [GOOD] >> ViewQuerySplit::WithComments [GOOD] >> ViewQuerySplit::Joins [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec >> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK >> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] |75.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |75.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |75.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> 
ViewQuerySplit::Joins [GOOD] Test command err: 2025-04-17T17:11:22.324479Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494330881683776416:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:22.324525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/000c3c/r3tmp/tmp6VCpSL/pdisk_1.dat 2025-04-17T17:11:23.984359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:11:24.016201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:24.016302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:24.024903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20503, node 1 2025-04-17T17:11:24.416796Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:24.561769Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:24.561792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:24.561798Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:24.561883Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31115 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:11:24.967118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:11:24.996911Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:11:27.331657Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494330881683776416:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:27.789627Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:11:29.591106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330911748547925:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:29.591243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:29.591614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330911748547961:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:29.595271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:11:29.604898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494330911748547963:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:11:29.686252Z node 1 :TX_PROXY ERROR: Actor# [1:7494330911748548014:2346] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:11:30.304224Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2bqqkr6handns47t1sb276, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODBhYWM1NGMtMjU2NWI1MDYtODhkNjJiY2MtMmVhZmNkNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:11:30.364552Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7494330916043515353:2348], owner: [1:7494330916043515349:2346], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-04-17T17:11:30.369069Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7494330916043515353:2348], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:11:30.376881Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7494330916043515353:2348], row count: 2, finished: 1 2025-04-17T17:11:30.376936Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7494330916043515353:2348], owner: [1:7494330916043515349:2346], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-04-17T17:11:30.384075Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744909890300, txId: 281474976710660] shutting down 2025-04-17T17:11:36.780612Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494330940413461182:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:36.780903Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/000c3c/r3tmp/tmprNb6nE/pdisk_1.dat 2025-04-17T17:11:38.681939Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:11:39.627854Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:39.685999Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:11:39.715061Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:39.715224Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:39.721905Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12588, node 2 2025-04-17T17:11:41.272251Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:41.272271Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:41.272284Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:41.272391Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8211 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:11:41.580100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:41.613170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:41.646334Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494330960218117237:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:41.646392Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:11:41.652156Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494330962390780080:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:41.652250Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:11:41.714577Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:41.714656Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:41.715015Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494330940413461182:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:41.715051Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:11:41.728928Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-04-17 ... 
57594037968897, database: /Root/Tenant1, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], database node count: 2 2025-04-17T17:13:31.125262Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reply batch: range# From { IntervalEndUs: 0 Rank: 0 } InclusiveFrom: true To { IntervalEndUs: 18446744073709551615 Rank: 4294967295 } InclusiveTo: true Type: TOP_PARTITIONS_BY_TLI_ONE_MINUTE , rows# 6, bytes# 378, next# 2025-04-17T17:13:31.134509Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7494331433054974721:2462], row count: 6, finished: 1 2025-04-17T17:13:31.134548Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7494331433054974721:2462], owner: [7:7494331428760007409:2460], scan id: 0, table id: [72075186224037888:1:0:top_partitions_by_tli_one_minute] 2025-04-17T17:13:31.362980Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01js2bvhg27qfz3ps10s8ygzp6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MjdiNTU4MjctZWVlZmQwNzQtZDZlYzkyN2UtZDIyYWZmYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:13:31.369771Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910010314, txId: 281474976715679] shutting down 2025-04-17T17:13:31.501408Z node 9 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded , top size by CPU # 1, top size by TLI # 1, time# 2025-04-17T17:13:31.501288Z 2025-04-17T17:13:31.502720Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxTopPartitions::Execute: , partition by CPU count# 1, partition by TLI count# 1 2025-04-17T17:13:31.514133Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxAggregate::Execute 2025-04-17T17:13:31.514177Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistQueryResults: interval end# 2025-04-17T17:13:31.000000Z, query count# 0 2025-04-17T17:13:31.514197Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistQueryTopResults: table id# 8, interval end# 2025-04-17T17:13:31.000000Z, query count# 0, persisted# 0 2025-04-17T17:13:31.514215Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistQueryTopResults: table id# 10, interval end# 2025-04-17T17:13:31.000000Z, query count# 0, persisted# 0 2025-04-17T17:13:31.514231Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistQueryTopResults: table id# 12, interval end# 2025-04-17T17:13:31.000000Z, query count# 0, persisted# 0 2025-04-17T17:13:31.514249Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistQueryTopResults: table id# 14, interval end# 2025-04-17T17:13:31.000000Z, query count# 0, persisted# 0 2025-04-17T17:13:31.514267Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistQueryTopResults: table id# 9, interval end# 2025-04-17T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-17T17:13:31.514284Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistQueryTopResults: table id# 11, interval end# 2025-04-17T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-17T17:13:31.514303Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistQueryTopResults: table id# 13, interval end# 2025-04-17T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-17T17:13:31.514320Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistQueryTopResults: table id# 15, interval end# 2025-04-17T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-17T17:13:31.516761Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxTopPartitions::Complete 2025-04-17T17:13:31.529477Z node 9 
:SYSTEM_VIEWS DEBUG: [72075186224037899] TTxAggregate::Complete 2025-04-17T17:13:31.568333Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxAggregate::Execute 2025-04-17T17:13:31.568385Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryResults: interval end# 2025-04-17T17:13:31.000000Z, query count# 0 2025-04-17T17:13:31.568409Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 8, interval end# 2025-04-17T17:13:31.000000Z, query count# 0, persisted# 0 2025-04-17T17:13:31.568432Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 10, interval end# 2025-04-17T17:13:31.000000Z, query count# 0, persisted# 0 2025-04-17T17:13:31.568452Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 12, interval end# 2025-04-17T17:13:31.000000Z, query count# 0, persisted# 0 2025-04-17T17:13:31.568473Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 14, interval end# 2025-04-17T17:13:31.000000Z, query count# 0, persisted# 0 2025-04-17T17:13:31.568492Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 9, interval end# 2025-04-17T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-17T17:13:31.568515Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 11, interval end# 2025-04-17T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-17T17:13:31.568534Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 13, interval end# 2025-04-17T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-17T17:13:31.568554Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 15, interval end# 2025-04-17T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-17T17:13:31.585657Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxAggregate::Complete 2025-04-17T17:13:31.782017Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01js2bvjzsbvgbr8teh7tthm31, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTE5YWExZDQtNzQwMmEwY2QtMjM0NTA4ZmQtZTdmYjBjNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:13:31.786867Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7494331433054974769:2474], owner: [7:7494331433054974765:2472], scan id: 0, table id: [72075186224037888:1:0:top_partitions_by_tli_one_minute] 2025-04-17T17:13:31.792958Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7494331433054974769:2474], schemeshard id: 72075186224037888, hive id: 72057594037968897, database: /Root/Tenant1, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], database node count: 2 2025-04-17T17:13:31.793804Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reply batch: range# From { IntervalEndUs: 1744910011000000 Rank: 0 } InclusiveFrom: true To { IntervalEndUs: 1744910011000000 Rank: 4294967295 } InclusiveTo: true Type: TOP_PARTITIONS_BY_TLI_ONE_MINUTE , rows# 1, bytes# 63, next# 2025-04-17T17:13:31.794157Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7494331433054974769:2474], row count: 1, finished: 1 2025-04-17T17:13:31.794195Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7494331433054974769:2474], owner: [7:7494331433054974765:2472], scan id: 0, table id: [72075186224037888:1:0:top_partitions_by_tli_one_minute] 2025-04-17T17:13:31.806898Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910011780, txId: 281474976715682] shutting down 2025-04-17T17:13:31.884245Z node 10 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded , top size by CPU # 1, top size by TLI # 1, time# 2025-04-17T17:13:31.882797Z 2025-04-17T17:13:31.884583Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxTopPartitions::Execute: , partition by CPU count# 1, partition by TLI count# 1 2025-04-17T17:13:31.900024Z node 8 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [8:7494331346299368720:2074], processor id# 72075186224037899, database# /Root/Tenant2 2025-04-17T17:13:31.899359Z node 9 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [9:7494331346423264017:2196], processor id# 72075186224037899, database# /Root/Tenant2 2025-04-17T17:13:31.899592Z node 10 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [10:7494331328155652195:2120], processor id# 72075186224037893, database# /Root/Tenant1 2025-04-17T17:13:31.931379Z node 11 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [11:7494331326665425620:2073], processor id# 72075186224037893, database# /Root/Tenant1 2025-04-17T17:13:31.909030Z node 8 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [8:7494331346299368720:2074], database# /Root/Tenant2, processor id# 72075186224037899 2025-04-17T17:13:31.913393Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-04-17T17:13:31.932047Z node 11 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [11:7494331326665425620:2073], database# /Root/Tenant1, processor id# 72075186224037893 2025-04-17T17:13:31.914209Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:13:31.953777Z node 9 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [9:7494331346423264017:2196], database# /Root/Tenant2, processor id# 72075186224037899 2025-04-17T17:13:31.914436Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2025-04-17T17:13:31.915031Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> 
Disconnected 2025-04-17T17:13:31.941340Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2025-04-17T17:13:31.942193Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:13:31.948732Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2025-04-17T17:13:31.950176Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:13:31.955492Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7494331350594336257:2112], Type=268959746 2025-04-17T17:13:31.955559Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7494331350594336257:2112], Type=268959746 2025-04-17T17:13:31.955598Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7494331350594336257:2112], Type=268959746 2025-04-17T17:13:31.955629Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7494331350594336257:2112], Type=268959746 2025-04-17T17:13:31.955649Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7494331350594336257:2112], Type=268959746 2025-04-17T17:13:31.955670Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7494331350594336257:2112], Type=268959746 2025-04-17T17:13:31.955979Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[11:7494331326665425852:2106], Type=268959746 2025-04-17T17:13:31.956007Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[11:7494331326665425852:2106], Type=268959746 2025-04-17T17:13:31.956031Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[11:7494331326665425852:2106], Type=268959746 2025-04-17T17:13:31.956053Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[11:7494331326665425852:2106], Type=268959746 2025-04-17T17:13:31.956072Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[11:7494331326665425852:2106], Type=268959746 2025-04-17T17:13:31.956095Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[11:7494331326665425852:2106], Type=268959746 2025-04-17T17:13:31.960786Z node 10 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [10:7494331328155652195:2120], database# /Root/Tenant1, processor id# 72075186224037893 2025-04-17T17:13:35.224623Z node 8 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [8:7494331273284924606:2063] >> SystemView::AuthGroups [GOOD] >> SystemView::AuthGroupMembers >> YdbSdkSessionsPool::StressTestSync1 [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies >> ResourcePoolsDdl::TestPoolSwitchToLimitedState [GOOD] >> ResourcePoolsDdl::TestDropResourcePool ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestSync1 [GOOD] Test command err: 2025-04-17T17:13:25.277583Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331410099291683:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:25.277633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001367/r3tmp/tmpgnKTXY/pdisk_1.dat 2025-04-17T17:13:26.228360Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:26.234966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:26.235047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:26.247308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27217, node 1 2025-04-17T17:13:26.550815Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:13:26.550837Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:13:26.550844Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:13:26.550958Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22305 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:13:26.971201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:13:30.280317Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331410099291683:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:30.280398Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit [GOOD] >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:12:20.599998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:12:20.600248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:12:20.600509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:12:20.600624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:12:20.600865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:12:20.601542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:12:20.601757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:12:20.602003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:12:20.602361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:12:22.547507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:12:22.560259Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:22.954790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:12:22.956842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:12:22.958053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:12:23.205832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:12:23.206846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:12:23.218462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:23.218996Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:12:23.289016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:23.342963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:23.343315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:23.361695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:12:23.362074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:23.362991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:12:23.380810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:12:23.608516Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:12:26.676343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:12:26.677883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:26.700540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:12:26.717112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:12:26.718167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:26.801455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:26.803400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:12:26.813101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:26.814027Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:12:26.814317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:12:26.814956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:12:26.870811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:26.871193Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 
ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:12:26.871944Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:12:26.894130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:26.894186Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:26.894862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:26.895568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:12:26.973094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:12:27.008623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:12:27.011487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:12:27.056185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:12:27.057182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:12:27.057523Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:27.060089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:12:27.080615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:12:27.083180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:12:27.093374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:12:27.136839Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:12:27.137486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:12:27.157098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:12:27.157799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 
2025-04-17T17:12:27.177004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:12:27.177931Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:12:27.179552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:12:27.179854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:27.188198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:12:27.188566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:27.189269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:12:27.189646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:12:27.190007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:12:27.190407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:12:27.190788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:12:27.191493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:12:27.200169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:12:27.233183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:12:27.234121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:12:27.234172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
73Z node 5 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-04-17T17:13:37.468619Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-17T17:13:37.468677Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T17:13:37.470071Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-17T17:13:37.470153Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-17T17:13:37.470181Z node 5 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-04-17T17:13:37.470213Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-04-17T17:13:37.470244Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-17T17:13:37.470323Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-04-17T17:13:37.473645Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:13:37.473962Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-04-17T17:13:37.474018Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-04-17T17:13:37.474070Z node 5 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-04-17T17:13:37.479019Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-04-17T17:13:37.479179Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:13:37.481872Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000005 2025-04-17T17:13:37.482400Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:37.482544Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 21474838635 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:13:37.482609Z node 5 :FLAT_TX_SCHEMESHARD INFO: 
TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000005, at schemeshard: 72057594046678944 2025-04-17T17:13:37.482752Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-04-17T17:13:37.482836Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-04-17T17:13:37.482882Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-04-17T17:13:37.482929Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-04-17T17:13:37.482976Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-04-17T17:13:37.483045Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:13:37.483120Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-17T17:13:37.483164Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-04-17T17:13:37.483215Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-04-17T17:13:37.483262Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2025-04-17T17:13:37.483307Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2025-04-17T17:13:37.483378Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-17T17:13:37.483423Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-04-17T17:13:37.483477Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-17T17:13:37.483515Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-04-17T17:13:37.484183Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-04-17T17:13:37.489715Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:13:37.489769Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:13:37.489941Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-17T17:13:37.490108Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:37.490145Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:205:2207], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-04-17T17:13:37.490183Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:205:2207], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-04-17T17:13:37.490995Z 
node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-17T17:13:37.491082Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-17T17:13:37.491120Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-04-17T17:13:37.491171Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-17T17:13:37.491230Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T17:13:37.491798Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-17T17:13:37.491871Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-17T17:13:37.491899Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-04-17T17:13:37.491927Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-17T17:13:37.491953Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-17T17:13:37.492035Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-04-17T17:13:37.492076Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [5:125:2151] 2025-04-17T17:13:37.492700Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:13:37.492760Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-17T17:13:37.492841Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:13:37.494782Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-04-17T17:13:37.496002Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-04-17T17:13:37.496121Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T17:13:37.496195Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle: 
TEvNotifyTxCompletionResult: txId# 281474976710758 2025-04-17T17:13:37.496269Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2025-04-17T17:13:37.496331Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:13:37.496372Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-04-17T17:13:37.496419Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 103, itemIdx# 4294967295 2025-04-17T17:13:37.497960Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 103 2025-04-17T17:13:37.498219Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T17:13:37.498268Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T17:13:37.498740Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T17:13:37.498838Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:13:37.498895Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [5:550:2509] TestWaitNotification: OK eventTxId 103 >> Cdc::KeysOnlyLog[PqRunner] [GOOD] >> Cdc::KeysOnlyLog[YdsRunner] |75.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |75.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut >> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix1 >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex >> SystemView::DescribeSystemFolder [GOOD] >> SystemView::DescribeAccessDenied >> DstCreator::ColumnsSizeMismatch |75.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |75.9%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad [GOOD] >> WithSDK::DescribeConsumer >> DstCreator::NonExistentSrc >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite [GOOD] >> DstCreator::ReplicationModeMismatch >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite >> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1 >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow |75.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |75.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects >> TCdcStreamTests::Basic >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite [GOOD] >> 
DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite |75.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |75.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query >> KqpQueryPerf::MultiRead+QueryService [GOOD] >> KqpQueryPerf::Update+QueryService-UseSink [GOOD] >> Cdc::KeysOnlyLog[YdsRunner] [GOOD] >> Cdc::KeysOnlyLog[TopicRunner] >> TSchemeShardUserAttrsTest::Boot >> TSchemeShardUserAttrsTest::MkDir >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 15962, MsgBus: 15108 2025-04-17T17:13:34.889922Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331448476554218:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:34.890206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001991/r3tmp/tmpEIabIJ/pdisk_1.dat 2025-04-17T17:13:35.664365Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:35.667781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:35.667865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:35.682418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15962, node 1 2025-04-17T17:13:35.931220Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:13:35.931240Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:13:35.931248Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:13:35.931337Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15108 TClient is connected to server localhost:15108 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:13:36.910788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:13:36.936437Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T17:13:36.951684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:13:37.148669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-17T17:13:37.438296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-17T17:13:37.525596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:13:39.592674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331469951392355:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:13:39.592800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:13:39.848088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T17:13:39.890134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T17:13:39.892637Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331448476554218:2193];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:13:39.893424Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T17:13:39.941967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T17:13:40.013492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T17:13:40.057716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T17:13:40.115426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T17:13:40.168910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T17:13:40.278380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331474246360325:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:13:40.278457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:13:40.278642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331474246360330:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:13:40.282297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T17:13:40.298620Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480
2025-04-17T17:13:40.298827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331474246360332:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T17:13:40.388784Z node 1 :TX_PROXY ERROR: Actor# [1:7494331474246360387:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies [GOOD]
>> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites
>> DstCreator::NonExistentSrc [GOOD]
>> DstCreator::KeyColumnsSizeMismatch
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService-UseSink [GOOD]
Test command err: Trying to start YDB, gRPC: 10658, MsgBus: 8884
2025-04-17T17:13:33.279487Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331443608274332:2220];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:13:33.283494Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00160b/r3tmp/tmpbtPP3b/pdisk_1.dat
2025-04-17T17:13:33.765321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:13:33.765401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:13:33.776887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 10658, node 1
2025-04-17T17:13:33.852763Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:13:34.104596Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T17:13:34.104624Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T17:13:34.104631Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T17:13:34.104732Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:8884
TClient is connected to server localhost:8884
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T17:13:35.354083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:13:35.403965Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T17:13:35.412293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:13:35.586188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:13:35.849464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-17T17:13:35.975346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-04-17T17:13:38.141008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331465083112402:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:13:38.141119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:13:38.279803Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331443608274332:2220];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:13:38.286764Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T17:13:39.010541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T17:13:39.091205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T17:13:39.168911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T17:13:39.243082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T17:13:39.298725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T17:13:39.385107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T17:13:39.434904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T17:13:39.572313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331469378080382:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:13:39.572467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:13:39.572959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331469378080388:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:13:39.586928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T17:13:39.612226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331469378080390:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T17:13:39.730692Z node 1 :TX_PROXY ERROR: Actor# [1:7494331469378080448:3563] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> DstCreator::ColumnsSizeMismatch [GOOD]
>> DstCreator::ColumnTypeMismatch
>> DstCreator::ReplicationModeMismatch [GOOD]
>> DstCreator::ReplicationConsistencyLevelMismatch
>> DataShardReadIterator::ShouldReadKeyPrefix1 [GOOD]
>> DataShardReadIterator::ShouldReadKeyPrefix2
>> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 [GOOD]
>> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101
>> SystemView::QueryStats [GOOD]
>> SystemView::PartitionStatsTtlFields
>> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false
>> YdbSdkSessionsPool::PeriodicTask10 [GOOD]
>> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad [GOOD]
>> TopicAutoscaling::ControlPlane_CDC
>> SystemView::AuthUsers_Access [GOOD]
>> SystemView::AuthUsers_ResultOrder
>> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow [GOOD]
>> DataShardReadIterator::ShouldReadNonExistingKey
>> ResourcePoolsDdl::TestDropResourcePool [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::PeriodicTask10 [GOOD]
Test command err: 2025-04-17T17:13:22.516827Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331393110195326:2073];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:13:22.516871Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0014cf/r3tmp/tmpEpD0aR/pdisk_1.dat
2025-04-17T17:13:23.650841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:13:23.712646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:13:23.712745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:13:23.721031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:13:23.740875Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 8672, node 1
2025-04-17T17:13:23.853054Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-17T17:13:23.853073Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-17T17:13:24.023268Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T17:13:24.023285Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T17:13:24.023291Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T17:13:24.023433Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:23600
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T17:13:24.620629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:13:27.523387Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331393110195326:2073];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:13:27.523461Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T17:13:38.743495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-17T17:13:38.743530Z node 1 :IMPORT WARN: Table profiles were not loaded
>> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite
>> DstCreator::KeyColumnsSizeMismatch [GOOD]
>> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex [GOOD]
>> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce
>> Cdc::KeysOnlyLog[TopicRunner] [GOOD]
>> Cdc::KeysOnlyLogDebezium
>> LabeledDbCounters::OneTablet [GOOD]
>> LabeledDbCounters::OneTabletRemoveCounters
>> DstCreator::ReplicationConsistencyLevelMismatch [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false
>> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true
>> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite
>> TConsoleTests::TestCreateTenant
>> TConfigsCacheTests::TestNoNotificationIfConfigIsCached
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsDdl::TestDropResourcePool [GOOD]
Test command err: 2025-04-17T17:11:39.659312Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494330954905211990:2063];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:11:39.659436Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002028/r3tmp/tmpooKTE7/pdisk_1.dat
2025-04-17T17:11:41.356252Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:11:41.622930Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:11:41.627731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:11:41.627806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:11:41.634409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 12255, node 1
2025-04-17T17:11:41.923286Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T17:11:41.923318Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T17:11:41.923325Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T17:11:41.923433Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:29620
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T17:11:42.547583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:11:44.647646Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494330954905211990:2063];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:11:44.647958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T17:11:54.180836Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization
2025-04-17T17:11:54.181468Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494331019329722120:2348], Start check tables existence, number paths: 2
2025-04-17T17:11:54.273000Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjllMjNjZTItNmJhOTI3NjctMTEyZWQyY2ItYjk5YmU3N2I=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZjllMjNjZTItNmJhOTI3NjctMTEyZWQyY2ItYjk5YmU3N2I=
2025-04-17T17:11:54.273252Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjllMjNjZTItNmJhOTI3NjctMTEyZWQyY2ItYjk5YmU3N2I=, ActorId: [1:7494331019329722136:2349], ActorState: unknown state, session actor bootstrapped
2025-04-17T17:11:54.274904Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1
2025-04-17T17:11:54.274918Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes
2025-04-17T17:11:54.274929Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled
2025-04-17T17:11:54.280418Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494331019329722120:2348], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown
2025-04-17T17:11:54.280712Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494331019329722120:2348], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown
2025-04-17T17:11:54.281014Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494331019329722120:2348], Successfully finished
2025-04-17T17:11:54.535494Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0
2025-04-17T17:11:54.740962Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331019329722142:2340], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating
2025-04-17T17:11:54.822035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-17T17:11:54.845165Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331019329722142:2340], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658
2025-04-17T17:11:54.845296Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331019329722142:2340], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected
2025-04-17T17:11:55.015220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331019329722142:2340], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-17T17:11:55.076609Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331019329722142:2340], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating
2025-04-17T17:11:55.111158Z node 1 :TX_PROXY ERROR: Actor# [1:7494331023624689490:2373] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T17:11:55.111589Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331019329722142:2340], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created
2025-04-17T17:11:55.376172Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default
2025-04-17T17:11:55.376191Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root
2025-04-17T17:11:55.376853Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjllMjNjZTItNmJhOTI3NjctMTEyZWQyY2ItYjk5YmU3N2I=, ActorId: [1:7494331019329722136:2349], ActorState: ReadyState, TraceId: 01js2brn7cetv6y5hbx2qhadn3, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: CREATE RESOURCE POOL my_pool WITH ( CONCURRENT_QUERY_LIMIT=1, QUEUE_SIZE=0 ); rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default
2025-04-17T17:11:55.407351Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331023624689501:2354], DatabaseId: /Root, PoolId: default, Start pool fetching
2025-04-17T17:11:55.684432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331023624689501:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:11:55.684768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:11:56.121951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-17T17:11:56.128071Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ZjllMjNjZTItNmJhOTI3NjctMTEyZWQyY2ItYjk5YmU3N2I=, ActorId: [1:7494331019329722136:2349], ActorState: ExecuteState, TraceId: 01js2brn7cetv6y5hbx2qhadn3, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7494331023624689502:2349] WorkloadServiceCleanup: 0
2025-04-17T17:11:56.129414Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjllMjNjZTItNmJhOTI3NjctMTEyZWQyY2ItYjk5YmU3N2I=, ActorId: [1:7494331019329722136:2349], ActorState: CleanupState, TraceId: 01js2brn7cetv6y5hbx2qhadn3, EndCleanup, isFinal: 0
2025-04-17T17:11:56.129462Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjllMjNjZTItNmJhOTI3NjctMTEyZWQyY2ItYjk5YmU3N2I=, ActorId: [1:7494331019329722136:2349], ActorState: CleanupState, TraceId: 01js2brn7cetv6y5hbx2qhadn3, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7494330954905212217:2277]
2025-04-17T17:11:56.134587Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmY0ZDMwZGUtYjIwZTdlYmMtMTIzN2E3NmMtOGUxNTFhYjQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NmY0ZDMwZGUtYjIwZTdlYmMtMTIzN2E3NmMtOGUxNTFhYjQ=
2025-04-17T17:11:56.134865Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: my_pool
2025-04-17T17:11:56.134896Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmY0ZDMwZGUtYjIwZTdlYmMtMTIzN2E3NmMtOGUxNTFhYjQ=, ActorId: [1:7494331027919656833:2357], ActorState: unknown state, session actor bootstrapped
2025-04-17T17:11:56.135030Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmY0ZDMwZGUtYjIwZTdlYmMtMTIzN2E3NmMtOGUxNTFhYjQ=, ActorId: [1:7494331027919656833:2357], ActorState: ReadyState, TraceId: 01js2brnz62d539dy3jjk1rqhy, received request, proxyRequestId: 4 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7494331027919656832:2404] database: Root databaseId: /Root pool id: my_pool
2025-04-17T17:11:56.135062Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7494331027919656833:2357], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=1&id=NmY0ZDMwZGUtYjIwZTdlYmMtMTIzN2E3NmMtOGUxNTFhYjQ=
2025-04-17T17:11:56.135100Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331027919656835:2358], DatabaseId: /Root, PoolId: my_pool, Start pool fetching
2025-04-17T17:11:56.135141Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7494331 ...
ontrol: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: DROP RESOURCE POOL my_pool; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default
2025-04-17T17:13:49.496704Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=8&id=NGVkZmVmZjUtY2VhOWYxMjEtM2IzOWI4ZjYtZWM4OTIxM2I=, TxId:
2025-04-17T17:13:49.496836Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=8&id=NGVkZmVmZjUtY2VhOWYxMjEtM2IzOWI4ZjYtZWM4OTIxM2I=, TxId:
2025-04-17T17:13:49.497085Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7494331493590626924:2337], DatabaseId: /Root, PoolId: my_pool, succefully refreshed pool state, in flight: 0, delayed: 0
2025-04-17T17:13:49.497119Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=NGVkZmVmZjUtY2VhOWYxMjEtM2IzOWI4ZjYtZWM4OTIxM2I=, ActorId: [8:7494331510770496591:2443], ActorState: ReadyState, Session closed due to explicit close event
2025-04-17T17:13:49.497154Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=NGVkZmVmZjUtY2VhOWYxMjEtM2IzOWI4ZjYtZWM4OTIxM2I=, ActorId: [8:7494331510770496591:2443], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0
2025-04-17T17:13:49.497182Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NGVkZmVmZjUtY2VhOWYxMjEtM2IzOWI4ZjYtZWM4OTIxM2I=, ActorId: [8:7494331510770496591:2443], ActorState: ReadyState, EndCleanup, isFinal: 1
2025-04-17T17:13:49.497207Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NGVkZmVmZjUtY2VhOWYxMjEtM2IzOWI4ZjYtZWM4OTIxM2I=, ActorId: [8:7494331510770496591:2443], ActorState: unknown state, Cleanup temp tables: 0
2025-04-17T17:13:49.497271Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NGVkZmVmZjUtY2VhOWYxMjEtM2IzOWI4ZjYtZWM4OTIxM2I=, ActorId: [8:7494331510770496591:2443], ActorState: unknown state, Session actor destroyed
2025-04-17T17:13:49.515848Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7494331493590626924:2337], DatabaseId: /Root, PoolId: my_pool, Got delete notification
2025-04-17T17:13:49.515948Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: my_pool
2025-04-17T17:13:49.516007Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494331510770496650:2457], DatabaseId: /Root, PoolId: my_pool, Start pool fetching
2025-04-17T17:13:49.516851Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494331510770496650:2457], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions }
2025-04-17T17:13:49.516957Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions }
2025-04-17T17:13:49.521448Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZmU4NThiNjMtYzI1ZWMwOTEtYWM1OGFkN2MtNTNkMGNjZjA=, ActorId: [8:7494331489295659514:2330], ActorState: ExecuteState, TraceId: 01js2bw4nhbgtr2nqp2tw4160x, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [8:7494331510770496640:2330] WorkloadServiceCleanup: 0
2025-04-17T17:13:49.523301Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZmU4NThiNjMtYzI1ZWMwOTEtYWM1OGFkN2MtNTNkMGNjZjA=, ActorId: [8:7494331489295659514:2330], ActorState: CleanupState, TraceId: 01js2bw4nhbgtr2nqp2tw4160x, EndCleanup, isFinal: 0
2025-04-17T17:13:49.523352Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZmU4NThiNjMtYzI1ZWMwOTEtYWM1OGFkN2MtNTNkMGNjZjA=, ActorId: [8:7494331489295659514:2330], ActorState: CleanupState, TraceId: 01js2bw4nhbgtr2nqp2tw4160x, Sent query response back to proxy, proxyRequestId: 18, proxyId: [8:7494331467820822389:2065]
2025-04-17T17:13:49.531402Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2IxZDAzMDAtNTZmM2I2ZmMtODcwYjFlZS00YmI5ODM4ZA==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id N2IxZDAzMDAtNTZmM2I2ZmMtODcwYjFlZS00YmI5ODM4ZA==
2025-04-17T17:13:49.532289Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2IxZDAzMDAtNTZmM2I2ZmMtODcwYjFlZS00YmI5ODM4ZA==, ActorId: [8:7494331510770496664:2458], ActorState: unknown state, session actor bootstrapped
2025-04-17T17:13:49.534712Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: my_pool
2025-04-17T17:13:49.534842Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2IxZDAzMDAtNTZmM2I2ZmMtODcwYjFlZS00YmI5ODM4ZA==, ActorId: [8:7494331510770496664:2458], ActorState: ReadyState, TraceId: 01js2bw4pybbcs63f3ar6nbzjk, received request, proxyRequestId: 19 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [8:7494331510770496663:2665] database: Root databaseId: /Root pool id: my_pool
2025-04-17T17:13:49.534888Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [8:7494331510770496664:2458], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=8&id=N2IxZDAzMDAtNTZmM2I2ZmMtODcwYjFlZS00YmI5ODM4ZA==
2025-04-17T17:13:49.534938Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494331510770496666:2459], DatabaseId: /Root, PoolId: my_pool, Start pool fetching
2025-04-17T17:13:49.535082Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [8:7494331510770496667:2460], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=8&id=N2IxZDAzMDAtNTZmM2I2ZmMtODcwYjFlZS00YmI5ODM4ZA==, Start pool fetching
2025-04-17T17:13:49.535113Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494331510770496668:2461], DatabaseId: /Root, PoolId: my_pool, Start pool fetching
2025-04-17T17:13:49.535491Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494331510770496668:2461], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions }
2025-04-17T17:13:49.535628Z node 8 :KQP_WORKLOAD_SERVICE ERROR: [WorkloadService] [TPoolResolverActor] ActorId: [8:7494331510770496667:2460], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=8&id=N2IxZDAzMDAtNTZmM2I2ZmMtODcwYjFlZS00YmI5ODM4ZA==, Failed to fetch pool info NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions }
2025-04-17T17:13:49.535744Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolResolverActor] ActorId: [8:7494331510770496667:2460], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=8&id=N2IxZDAzMDAtNTZmM2I2ZmMtODcwYjFlZS00YmI5ODM4ZA==, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id my_pool subissue: {
: Error: Resource pool my_pool not found or you don't have access permissions } }
2025-04-17T17:13:49.535763Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494331510770496666:2459], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions }
2025-04-17T17:13:49.535875Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply continue error NOT_FOUND to [8:7494331510770496664:2458]: {
: Error: Failed to resolve pool id my_pool subissue: {
: Error: Resource pool my_pool not found or you don't have access permissions } }
2025-04-17T17:13:49.535948Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions }
2025-04-17T17:13:49.536055Z node 8 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=8&id=N2IxZDAzMDAtNTZmM2I2ZmMtODcwYjFlZS00YmI5ODM4ZA==, ActorId: [8:7494331510770496664:2458], ActorState: ExecuteState, TraceId: 01js2bw4pybbcs63f3ar6nbzjk, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool
2025-04-17T17:13:49.536226Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=N2IxZDAzMDAtNTZmM2I2ZmMtODcwYjFlZS00YmI5ODM4ZA==, ActorId: [8:7494331510770496664:2458], ActorState: ExecuteState, TraceId: 01js2bw4pybbcs63f3ar6nbzjk, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1
2025-04-17T17:13:49.536300Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Finished request with worker actor [8:7494331510770496664:2458], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=8&id=N2IxZDAzMDAtNTZmM2I2ZmMtODcwYjFlZS00YmI5ODM4ZA==
2025-04-17T17:13:49.536391Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2IxZDAzMDAtNTZmM2I2ZmMtODcwYjFlZS00YmI5ODM4ZA==, ActorId: [8:7494331510770496664:2458], ActorState: CleanupState, TraceId: 01js2bw4pybbcs63f3ar6nbzjk, EndCleanup, isFinal: 1
2025-04-17T17:13:49.536520Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2IxZDAzMDAtNTZmM2I2ZmMtODcwYjFlZS00YmI5ODM4ZA==, ActorId: [8:7494331510770496664:2458], ActorState: CleanupState, TraceId: 01js2bw4pybbcs63f3ar6nbzjk, Sent query response back to proxy, proxyRequestId: 19, proxyId: [8:7494331467820822389:2065]
2025-04-17T17:13:49.536556Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2IxZDAzMDAtNTZmM2I2ZmMtODcwYjFlZS00YmI5ODM4ZA==, ActorId: [8:7494331510770496664:2458], ActorState: unknown state, TraceId: 01js2bw4pybbcs63f3ar6nbzjk, Cleanup temp tables: 0
2025-04-17T17:13:49.536693Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2IxZDAzMDAtNTZmM2I2ZmMtODcwYjFlZS00YmI5ODM4ZA==, ActorId: [8:7494331510770496664:2458], ActorState: unknown state, TraceId: 01js2bw4pybbcs63f3ar6nbzjk, Session actor destroyed
2025-04-17T17:13:49.553568Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZmU4NThiNjMtYzI1ZWMwOTEtYWM1OGFkN2MtNTNkMGNjZjA=, ActorId: [8:7494331489295659514:2330], ActorState: ReadyState, Session closed due to explicit close event
2025-04-17T17:13:49.553664Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZmU4NThiNjMtYzI1ZWMwOTEtYWM1OGFkN2MtNTNkMGNjZjA=, ActorId: [8:7494331489295659514:2330], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0
2025-04-17T17:13:49.553700Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZmU4NThiNjMtYzI1ZWMwOTEtYWM1OGFkN2MtNTNkMGNjZjA=, ActorId: [8:7494331489295659514:2330], ActorState: ReadyState, EndCleanup, isFinal: 1
2025-04-17T17:13:49.553741Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZmU4NThiNjMtYzI1ZWMwOTEtYWM1OGFkN2MtNTNkMGNjZjA=, ActorId: [8:7494331489295659514:2330], ActorState: unknown state, Cleanup temp tables: 0
2025-04-17T17:13:49.553882Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZmU4NThiNjMtYzI1ZWMwOTEtYWM1OGFkN2MtNTNkMGNjZjA=, ActorId: [8:7494331489295659514:2330], ActorState: unknown state, Session actor destroyed
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnsSizeMismatch [GOOD]
Test command err: 2025-04-17T17:13:43.184327Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331483899316620:2141];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:13:43.184620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0010fe/r3tmp/tmpcUgUsG/pdisk_1.dat
2025-04-17T17:13:43.606981Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:13:43.624822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:13:43.624891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:13:43.626435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:19089
TServer::EnableGrpc on GrpcPort 2231, node 1
2025-04-17T17:13:43.883895Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T17:13:43.883917Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T17:13:43.883923Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T17:13:43.884050Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:19089
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T17:13:44.201513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
TClient::Ls request: /Root
2025-04-17T17:13:44.222260Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744910024263 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version... (TRUNCATED)
TClient::Ls request: /Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744910024263 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version... (TRUNCATED)
2025-04-17T17:13:44.237599Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }
2025-04-17T17:13:44.237733Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }
2025-04-17T17:13:44.237742Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles
2025-04-17T17:13:44.242196Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse
2025-04-17T17:13:46.113921Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { status: SCHEME_ERROR, issues: } }
2025-04-17T17:13:46.113975Z node 1 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot describe table: status: SCHEME_ERROR, issue:
2025-04-17T17:13:46.796680Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494331497414328397:2063];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:13:46.796711Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0010fe/r3tmp/tmpQw2b8P/pdisk_1.dat
2025-04-17T17:13:46.990104Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:13:46.990197Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:13:47.002079Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:13:47.035838Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:29779
TServer::EnableGrpc on GrpcPort 25117, node 2
2025-04-17T17:13:47.232548Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T17:13:47.232567Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T17:13:47.232572Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T17:13:47.232645Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:29779
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T17:13:47.629571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:13:47.638488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:13:47.818222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
TClient::Ls request: /Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744910027679 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744910027889 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED)
TClient::Ls request: /Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744910027679 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744910027889 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED)
2025-04-17T17:13:47.863779Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }
2025-04-17T17:13:47.863944Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }
2025-04-17T17:13:47.863956Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles
2025-04-17T17:13:47.864605Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse
2025-04-17T17:13:50.152188Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744910027763, tx_id: 281474976715658 } } }
2025-04-17T17:13:50.152463Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult
2025-04-17T17:13:50.153789Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)}
2025-04-17T17:13:50.155411Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744910027889 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnNames: "value" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480
2025-04-17T17:13:50.155586Z node
2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key columns size mismatch: expected: 1, got: 2 >> DstCreator::ColumnTypeMismatch [GOOD] >> TSchemeShardUserAttrsTest::Boot [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] Test command err: 2025-04-17T17:13:43.529889Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331483520168862:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:43.529931Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0010fa/r3tmp/tmpcRIcCN/pdisk_1.dat 2025-04-17T17:13:44.069769Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:44.091155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:44.091239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:44.097690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26549 TServer::EnableGrpc on GrpcPort 16858, node 1 2025-04-17T17:13:44.362536Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:13:44.362563Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:13:44.362576Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:13:44.362897Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26549 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:13:44.778480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
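The "Key columns size mismatch: expected: 1, got: 2" error above is DstCreator rejecting a destination whose primary key differs from the source: the described Dst table carries two key columns (KeyColumnNames: "key" and "value"), while Src has a single-column key. A minimal standalone sketch of such a size-then-name comparison is shown below; the struct and helper are hypothetical stand-ins, since the real check works on the protobuf table descriptions seen in the TEvDescribeSchemeResult record above.

#include <iostream>
#include <string>
#include <vector>

// Hypothetical stand-in for the table descriptions DstCreator compares;
// not the actual YDB protobuf types.
struct TTableSketch {
    std::vector<std::string> KeyColumnNames;
};

bool CheckKeyColumns(const TTableSketch& expected, const TTableSketch& got, std::string& reason) {
    // Size is checked first, producing the message shape seen in the log.
    if (expected.KeyColumnNames.size() != got.KeyColumnNames.size()) {
        reason = "Key columns size mismatch: expected: "
               + std::to_string(expected.KeyColumnNames.size())
               + ", got: " + std::to_string(got.KeyColumnNames.size());
        return false;
    }
    // Only if the sizes agree are individual column names compared.
    for (size_t i = 0; i < expected.KeyColumnNames.size(); ++i) {
        if (expected.KeyColumnNames[i] != got.KeyColumnNames[i]) {
            reason = "Key column name mismatch: expected: " + expected.KeyColumnNames[i]
                   + ", got: " + got.KeyColumnNames[i];
            return false;
        }
    }
    return true;
}

int main() {
    TTableSketch src{{"key"}};          // Src: single key column
    TTableSketch dst{{"key", "value"}}; // Dst: composite key, as in the log
    std::string reason;
    if (!CheckKeyColumns(src, dst, reason)) {
        std::cerr << "status# StatusSchemeError, reason# " << reason << '\n';
    }
    return 0;
}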
2025-04-17T17:13:44.800488Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:13:44.807328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:44.925291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744910024837 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744910025012 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744910024837 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744910025012 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-04-17T17:13:44.977600Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T17:13:44.977770Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T17:13:44.977784Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T17:13:44.978444Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T17:13:46.761353Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744910024914, tx_id: 281474976710658 } } } 2025-04-17T17:13:46.761708Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T17:13:46.762933Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-17T17:13:46.767192Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744910025012 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_NONE ConsistencyLevel: CONSISTENCY_LEVEL_UNKNOWN } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIo ... 968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12678 TServer::EnableGrpc on GrpcPort 9910, node 2 2025-04-17T17:13:48.092690Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:13:48.092711Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:13:48.092717Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:13:48.092821Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12678 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:13:48.674229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:48.681053Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:13:48.686454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:48.725968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744910028722 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744910028806 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744910028722 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744910028806 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-04-17T17:13:48.777422Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T17:13:48.777524Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T17:13:48.777535Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T17:13:48.777966Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T17:13:51.052597Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744910028757, tx_id: 281474976710658 } } } 2025-04-17T17:13:51.052878Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T17:13:51.054020Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-17T17:13:51.054737Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744910028806 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-17T17:13:51.054892Z node 2 :REPLICATION_CONTROLLER 
ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Replication consistency level mismatch: expected: CONSISTENCY_LEVEL_ROW, got: 1 >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false >> SystemView::TopPartitionsByCpuFollowers [GOOD] >> SystemView::TabletsFields >> Balancing::Balancing_ManyTopics_TopicApi [GOOD] >> Balancing::Balancing_ManyTopics_PQv1 >> TSchemeShardUserAttrsTest::MkDir [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ColumnTypeMismatch [GOOD] Test command err: 2025-04-17T17:13:42.597923Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331479626381126:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:42.597961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001117/r3tmp/tmpaa4dPw/pdisk_1.dat 2025-04-17T17:13:43.102135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:43.102246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:43.113471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:13:43.239742Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3024 TServer::EnableGrpc on GrpcPort 12941, node 1 2025-04-17T17:13:43.672304Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:13:43.672322Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:13:43.672328Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:13:43.672433Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3024 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:13:44.297832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
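The consistency-level check above fails the same way: the described Dst table carries ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } where row-level consistency is required, and the error prints the actual value numerically ("got: 1"). The sketch below uses assumed enum numbering (CONSISTENCY_LEVEL_GLOBAL = 1 matches the "got: 1" in the message; the real definition is a protobuf enum in the YDB sources).

#include <iostream>

// Assumed numbering for illustration only, not the actual YDB definition.
enum EConsistencyLevel {
    CONSISTENCY_LEVEL_UNKNOWN = 0,
    CONSISTENCY_LEVEL_GLOBAL  = 1,
    CONSISTENCY_LEVEL_ROW     = 2,
};

const char* ConsistencyLevelName(EConsistencyLevel level) {
    switch (level) {
        case CONSISTENCY_LEVEL_UNKNOWN: return "CONSISTENCY_LEVEL_UNKNOWN";
        case CONSISTENCY_LEVEL_GLOBAL:  return "CONSISTENCY_LEVEL_GLOBAL";
        case CONSISTENCY_LEVEL_ROW:     return "CONSISTENCY_LEVEL_ROW";
    }
    return "?";
}

int main() {
    const EConsistencyLevel expected = CONSISTENCY_LEVEL_ROW;  // required for the replica
    const EConsistencyLevel got = CONSISTENCY_LEVEL_GLOBAL;    // what Dst was created with
    if (expected != got) {
        // The expected side is printed symbolically, the actual side as the raw
        // enum value, reproducing the shape of the log message above.
        std::cerr << "status# StatusSchemeError, reason# Replication consistency level mismatch: "
                  << "expected: " << ConsistencyLevelName(expected)
                  << ", got: " << static_cast<int>(got) << '\n';
    }
    return 0;
}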
2025-04-17T17:13:44.311291Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:13:44.315671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:44.450523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744910024354 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744910024522 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744910024354 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744910024522 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-04-17T17:13:44.487895Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T17:13:44.488062Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T17:13:44.488092Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T17:13:44.488768Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T17:13:46.700254Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744910024438, tx_id: 281474976710658 } } } 2025-04-17T17:13:46.700627Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T17:13:46.702036Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-17T17:13:46.703935Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744910024522 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "extra" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { 
InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics ... RN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16649 TServer::EnableGrpc on GrpcPort 30389, node 2 2025-04-17T17:13:48.304911Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:13:48.304933Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:13:48.304941Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:13:48.305058Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16649 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:13:48.753404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:48.760073Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:13:48.762529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:13:48.846826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744910028799 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744910028932 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744910028799 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744910028932 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-04-17T17:13:48.895564Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T17:13:48.895694Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T17:13:48.895710Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T17:13:48.896156Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T17:13:51.258055Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744910028876, tx_id: 281474976715658 } } } 2025-04-17T17:13:51.258336Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T17:13:51.259726Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-17T17:13:51.260633Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744910028932 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-17T17:13:51.260807Z node 2 :REPLICATION_CONTROLLER 
ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Column type mismatch: name: value, expected: Utf8, got: Uint32 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::Boot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:13:51.089665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:13:51.111390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:13:51.111465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:13:51.114730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:13:51.114807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:13:51.114830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:13:51.114884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:13:51.120886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:13:51.132677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:51.411806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:13:51.411867Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:51.476184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:51.476319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:13:51.476424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:13:51.502047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:13:51.502467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:13:51.523183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:51.553933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:13:51.562752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:51.656864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:13:51.656992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-17T17:13:51.669006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:13:51.678824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:13:51.678996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:13:51.692479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:13:51.737565Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:13:51.960304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:13:51.976497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:51.988167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:13:51.988584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:13:51.988711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:52.021648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:52.028405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:13:52.028812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:52.028883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:13:52.028926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:13:52.028976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:13:52.037265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:52.037352Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:13:52.037399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:13:52.042697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:52.042759Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:52.042814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:52.042902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:13:52.050200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:13:52.065342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:13:52.072237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:13:52.073443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:52.073615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:13:52.073670Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:52.086841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:13:52.086961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:52.087203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:13:52.087333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:13:52.090711Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:13:52.090778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:13:52.091008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:52.091058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:13:52.112420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:52.120680Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:13:52.120906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:13:52.120950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:52.121004Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:13:52.121052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:52.121095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:13:52.121159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:52.121221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:13:52.121249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:13:52.121348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:13:52.121385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:13:52.121425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:13:52.123566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:13:52.123721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:13:52.123765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T17:13:52.123832Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T17:13:52.123871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:13:52.123994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T17:13:52.134890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T17:13:52.148694Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true >> TopicAutoscaling::ReadingAfterSplitTest_PQv1 [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true >> SystemView::AuthOwners [GOOD] >> SystemView::AuthOwners_Access >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::MkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: 
[1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:13:51.090739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:13:51.111394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:13:51.111470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:13:51.114390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:13:51.114485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:13:51.114514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:13:51.114597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:13:51.120305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:13:51.132311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:51.417489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:13:51.417548Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:51.473543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:51.473739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:13:51.473873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:13:51.501793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:13:51.502167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:13:51.522863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:51.552531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:13:51.562274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:51.660371Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:13:51.660476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:51.672343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:13:51.680182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:13:51.680303Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:13:51.692495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:13:51.729467Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:13:51.960249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:13:51.976428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:51.988096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:13:51.988481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:13:51.988585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:52.008156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:52.028363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:13:52.028769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:52.028841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:13:52.028881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:13:52.028925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:13:52.033529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:52.033611Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:13:52.033650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:13:52.037345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:52.037406Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:52.037465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:52.037522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:13:52.041221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:13:52.061591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:13:52.072234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:13:52.073434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:52.073581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:13:52.073627Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:52.088389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:13:52.088473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:52.088677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:13:52.088779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:13:52.097213Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:13:52.097272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:13:52.097484Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:52.097528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:13:52.112746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:52.124255Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:13:52.124408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:13:52.124445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:52.124488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:13:52.124532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:52.124571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:13:52.124637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:52.124673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:13:52.124713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:13:52.124781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:13:52.124821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:13:52.124857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:13:52.126874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:13:52.126992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:13:52.127030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... .844417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T17:13:52.845122Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-17T17:13:52.845281Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T17:13:52.845322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-17T17:13:52.845368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:383:2374] 2025-04-17T17:13:52.845515Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T17:13:52.845546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T17:13:52.845566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:383:2374] 2025-04-17T17:13:52.845678Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T17:13:52.845709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:13:52.845744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:383:2374] 2025-04-17T17:13:52.845841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:13:52.845858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:383:2374] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-04-17T17:13:52.846313Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-04-17T17:13:52.846479Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 195us result status StatusSuccess 2025-04-17T17:13:52.846918Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:13:52.938541Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:13:52.938784Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 225us result status StatusSuccess 2025-04-17T17:13:52.939104Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 } ChildrenExist: true } Children { Name: "SubDirA" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 
72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:13:52.939756Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:13:52.939933Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 175us result status StatusSuccess 2025-04-17T17:13:52.940246Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrB1" Value: "ValB1" } UserAttributes { Key: "AttrB2" Value: "ValB2" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:13:52.940775Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:13:52.940915Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA" took 147us result status StatusSuccess 2025-04-17T17:13:52.941201Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA" PathDescription { Self { Name: "SubDirA" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "DirB" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateCreate Owner: 
"root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrAA1" Value: "ValAA1" } UserAttributes { Key: "AttrAA2" Value: "ValAA2" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:13:52.941774Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:13:52.941960Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA/DirB" took 187us result status StatusSuccess 2025-04-17T17:13:52.942233Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA/DirB" PathDescription { Self { Name: "DirB" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrAB1" Value: "ValAB1" } UserAttributes { Key: "AttrAB2" Value: "ValAB2" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst >> YdbSdkSessionsPool::PeriodicTask1 [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites [GOOD] >> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false >> DataShardReadIterator::ShouldReadKeyPrefix2 [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix3 >> KqpScanArrowFormat::AllTypesColumns >> TCdcStreamTests::Basic [GOOD] >> TCdcStreamTests::Attributes >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false [GOOD] >> 
TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true
>> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false
>> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS [GOOD]
>> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false
>> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 [GOOD]
>> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK
>> KqpScanArrowInChanels::AllTypesColumns
>> KqpScanArrowFormat::SingleKey
>> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 [GOOD]
>> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198
>> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false
>> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD]
>> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest
>> YdbSdkSessionsPool::PeriodicTask1 [GOOD]
Test command err: 2025-04-17T17:13:22.749363Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331393703424263:2280];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:22.749405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001448/r3tmp/tmpDy9pSP/pdisk_1.dat 2025-04-17T17:13:23.730838Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:23.776450Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:13:23.778867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:23.778951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:23.794447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26186, node 1 2025-04-17T17:13:24.089674Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:13:24.089698Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:13:24.089704Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:13:24.089788Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28120 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:13:24.849561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:27.749486Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331393703424263:2280];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:27.749552Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:13:29.372426Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494331425651428208:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:29.372482Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001448/r3tmp/tmprYVlFZ/pdisk_1.dat 2025-04-17T17:13:29.605966Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21834, node 4 2025-04-17T17:13:29.725201Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:29.725284Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:29.788669Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:13:29.788689Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:13:29.788696Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:13:29.788773Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:13:29.791446Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26274 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:13:30.193556Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:34.378747Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494331425651428208:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:34.378816Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:13:44.580209Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:13:44.580242Z node 4 :IMPORT WARN: Table profiles were not loaded
>> SystemView::AuthGroupMembers [GOOD]
>> SystemView::AuthGroupMembers_Access
>> KqpScanArrowInChanels::AggregateNoColumn
>> Cdc::KeysOnlyLogDebezium [GOOD]
>> Cdc::NewAndOldImagesLog[PqRunner]
>> SystemView::DescribeAccessDenied [GOOD]
>> SystemView::CollectScanQueries
>> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true
>> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::PartitionSplit_DistributedTxCommit_ChildFirst
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false
>> TCdcStreamTests::Attributes [GOOD]
>> TCdcStreamTests::DocApi
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest
>> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:13:49.656455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:13:49.656696Z node
1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:13:49.656742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:13:49.656798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:13:49.656839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:13:49.656866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:13:49.656920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:13:49.657007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:13:49.657356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:49.722374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:13:49.722435Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:49.757088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:49.757251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:13:49.757416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:13:49.769388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:13:49.769736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:13:49.770408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:49.770751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:13:49.775541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:49.776672Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:13:49.776733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:49.776829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:13:49.776886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:13:49.776926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:13:49.777120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:13:49.783477Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:13:49.942247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: 
ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:13:49.942498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:49.942702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:13:49.942993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:13:49.943052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:49.961094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:49.961285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:13:49.961511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:49.961578Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:13:49.961629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:13:49.961666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:13:49.965861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:49.965927Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:13:49.965969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:13:49.972987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:49.973051Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:49.973092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:49.973165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:13:49.976499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:13:49.979476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:13:49.979671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add 
transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:13:49.980530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:49.980659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:13:49.980705Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:49.980956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:13:49.980997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:49.981131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:13:49.981196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:13:49.983553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:13:49.983621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:13:49.983818Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:49.983855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:13:49.984195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:49.984246Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:13:49.984332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:13:49.984363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:49.984401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:13:49.984428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:49.984477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:13:49.984529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:49.984561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:13:49.984589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:13:49.984659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:13:49.984696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, 
publications: 1, subscribers: 0 2025-04-17T17:13:49.984724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:13:49.986518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:13:49.986632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:13:49.986667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... Z node 7 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [7:125:2151] sender: [7:240:2058] recipient: [7:15:2062] 2025-04-17T17:13:55.965428Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:13:55.965640Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:55.965862Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:13:55.966093Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:13:55.966148Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:55.968967Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:55.969109Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:13:55.969333Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:55.969406Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:13:55.969454Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:13:55.969499Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:13:55.972273Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:55.972348Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:13:55.972411Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:13:55.974520Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:55.974582Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:55.974638Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:55.974705Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:13:55.974865Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:13:55.986005Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:13:55.986287Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:13:55.987338Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:55.987518Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 30064773228 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:13:55.987581Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:55.987925Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:13:55.988005Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:55.991669Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:13:55.991846Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:13:56.005816Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:13:56.005901Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:13:56.006148Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:56.006212Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:13:56.006672Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:56.006746Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:13:56.006900Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:13:56.006951Z 
node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:56.007011Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:13:56.007055Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:56.007117Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:13:56.007181Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:56.007231Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:13:56.007273Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:13:56.007370Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:13:56.007440Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:13:56.007495Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:13:56.010088Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:13:56.010270Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:13:56.010319Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T17:13:56.010384Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T17:13:56.010456Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:13:56.010599Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T17:13:56.019342Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T17:13:56.019997Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-17T17:13:56.024030Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_1" ExternalSchemeShard: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:13:56.024265Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 101:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_1" ExternalSchemeShard: true } 2025-04-17T17:13:56.024326Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 101:0, path /MyRoot/USER_1 
2025-04-17T17:13:56.024508Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2025-04-17T17:13:56.024569Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2025-04-17T17:13:56.024988Z node 7 :TX_PROXY DEBUG: actor# [7:270:2261] Bootstrap 2025-04-17T17:13:56.052855Z node 7 :TX_PROXY DEBUG: actor# [7:270:2261] Become StateWork (SchemeCache [7:275:2266]) 2025-04-17T17:13:56.054439Z node 7 :TX_PROXY DEBUG: actor# [7:270:2261] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:13:56.060821Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Invalid AlterExtSubDomain request: Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:13:56.061032Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: ALTER DATABASE, path: /MyRoot/USER_1 2025-04-17T17:13:56.061804Z node 7 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101
>> DataShardReadIterator::ShouldReadNonExistingKey [GOOD]
>> DataShardReadIterator::ShouldReadMultipleKeys
>> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce [GOOD]
>> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice
>> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive
>> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false [GOOD]
>> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true
|76.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut
|76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut
|76.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut
>> KqpLimits::TooBigQuery+useSink
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite
>> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false [GOOD]
>> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true
>> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst
>> KqpQuery::RewriteIfPresentToMap
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD]
|76.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep
|76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep
|76.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep
>> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false
>> TCdcStreamTests::DocApi [GOOD]
>> TCdcStreamTests::DocApiNegative
>> TConfigsCacheTests::TestNoNotificationIfConfigIsCached [GOOD]
>> TConfigsCacheTests::TestFullConfigurationRestore
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:13:49.833198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:13:49.833290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:13:49.833336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:13:49.833375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:13:49.833414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:13:49.833440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:13:49.833488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:13:49.833560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:13:49.833841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:49.965498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:13:49.965552Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:49.981065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:49.981161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG:
TTxUpgradeSchema.Execute 2025-04-17T17:13:49.981307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:13:49.987400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:13:49.987681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:13:49.988295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:49.988585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:13:49.992815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:49.994062Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:13:49.994117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:49.994223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:13:49.994262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:13:49.994293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:13:49.994456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:13:50.001231Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:13:50.169223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:13:50.169442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:50.169675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:13:50.169930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:13:50.169984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:50.174754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:50.174900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:13:50.175109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:50.175186Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:13:50.175231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:13:50.175259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:13:50.181173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:50.181240Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:13:50.181283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:13:50.183113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:50.183158Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:50.183193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:50.183256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:13:50.186390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:13:50.188325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:13:50.188530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:13:50.189504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:50.189634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:13:50.189688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:50.189968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:13:50.190027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:50.190186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:13:50.190260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:13:50.192217Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-17T17:13:50.192268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:13:50.192439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:50.192471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:13:50.192799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:50.192840Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:13:50.192915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:13:50.192961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:50.192996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:13:50.193022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:50.193052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:13:50.193090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:50.193122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:13:50.193146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:13:50.193205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:13:50.193254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:13:50.193284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:13:50.200531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:13:50.200662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:13:50.200697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
vConfigureStatus operationId:102:0 at schemeshard:72057594046678944 2025-04-17T17:13:58.813702Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 102:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2025-04-17T17:13:58.815695Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T17:13:58.815925Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T17:13:58.815971Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T17:13:58.816389Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 102, at schemeshard: 72057594046678944 2025-04-17T17:13:58.816439Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-17T17:13:58.816485Z node 8 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-04-17T17:13:58.849954Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2025-04-17T17:13:58.850159Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409549 2025-04-17T17:13:58.850222Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 102:0 HandleReply TEvConfigureStatus operationId:102:0 at schemeshard:72057594046678944 2025-04-17T17:13:58.850290Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 102:0 Got OK TEvConfigureStatus from tablet# 72075186233409549 shardIdx# 72057594046678944:4 at schemeshard# 72057594046678944 2025-04-17T17:13:58.850337Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2025-04-17T17:13:58.852272Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:13:58.852457Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:13:58.852509Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:13:58.852558Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 102:0, at tablet# 72057594046678944 2025-04-17T17:13:58.852612Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-17T17:13:58.852782Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:13:58.854328Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-17T17:13:58.854500Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 
FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-17T17:13:58.854851Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:58.854992Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 34359740523 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:13:58.855042Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-04-17T17:13:58.855468Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-17T17:13:58.855531Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-04-17T17:13:58.855668Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-17T17:13:58.855770Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[8:360:2334], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 72075186233409549, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T17:13:58.857437Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:13:58.857494Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:13:58.857682Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:58.857728Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [8:203:2205], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T17:13:58.858123Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:13:58.858183Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 102:0, ProgressState, NeedSyncHive: 0 2025-04-17T17:13:58.858218Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2025-04-17T17:13:58.859118Z node 8 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:13:58.859222Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, 
cookie: 102 2025-04-17T17:13:58.859261Z node 8 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:13:58.859303Z node 8 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-17T17:13:58.859344Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-17T17:13:58.859437Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-17T17:13:58.863474Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:13:58.863530Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:13:58.863683Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:13:58.863723Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:13:58.863770Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:13:58.863809Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:13:58.863852Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T17:13:58.863921Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:299:2290] message: TxId: 102 2025-04-17T17:13:58.863986Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:13:58.864035Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:13:58.864070Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:13:58.864287Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-17T17:13:58.865037Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:13:58.870698Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:13:58.870757Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:505:2444] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-04-17T17:13:58.873908Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:13:58.874315Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } 2025-04-17T17:13:58.874367Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, path /MyRoot/USER_0 2025-04-17T17:13:58.874495Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 103:0, explain: Invalid 
AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2025-04-17T17:13:58.874539Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2025-04-17T17:13:58.876898Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:13:58.877064Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, operation: ALTER DATABASE, path: /MyRoot/USER_0 TestModificationResult got TxId: 103, wait until txId: 103
>> SystemView::ShowCreateTablePartitionAtKeys [GOOD]
>> SystemView::ShowCreateTableColumn
>> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive
>> SystemView::TabletsFields [GOOD]
>> SystemView::TabletsShards
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite
>> WithSDK::DescribeConsumer [GOOD]
>> WithSDK::CommitWithWrongSessionId
>> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false
>> TopicAutoscaling::ControlPlane_CDC [GOOD]
>> TopicAutoscaling::ControlPlane_CDC_Enable
>> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition [GOOD]
>> TopicAutoscaling::ReadFromTimestamp_BeforeAutoscaleAwareSDK
>> TConfigsCacheTests::TestFullConfigurationRestore [GOOD]
>> TConfigsCacheTests::TestConfigurationSaveOnNotification
>> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD]
>> Cdc::NewAndOldImagesLog[PqRunner] [GOOD]
>> Cdc::NewAndOldImagesLog[YdsRunner]
>> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows [GOOD]
>> DataShardReadIteratorConsistency::LeaseConfirmationNotOutOfOrder
>> KqpStats::DataQueryWithEffects+UseSink
>> DataShardReadIterator::ShouldReadKeyPrefix3 [GOOD]
>> DataShardReadIterator::ShouldReadFromFollower
>> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD]
Test command err: Leader for TabletID 72057594046678944 is
[0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:13:53.339668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:13:53.339744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:13:53.339783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:13:53.339813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:13:53.339854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:13:53.339878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:13:53.339937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:13:53.340001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:13:53.340307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:53.426552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:13:53.426612Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:53.441517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:53.441647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:13:53.441814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:13:53.450499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:13:53.450820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:13:53.451506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:53.451831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:13:53.456077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:53.457353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:13:53.457437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:53.457538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:13:53.457582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-17T17:13:53.457611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:13:53.457801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:13:53.464147Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:13:53.617951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:13:53.618175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:53.618385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:13:53.618625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:13:53.618685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:53.625099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:53.625267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:13:53.625467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:53.625542Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:13:53.625585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:13:53.625629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:13:53.628220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:53.628277Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:13:53.628309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:13:53.630343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:53.630396Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:53.630449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:53.630511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:13:53.634028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:13:53.636009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:13:53.636231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:13:53.637169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:53.637302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:13:53.637357Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:53.637600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:13:53.637641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:53.637817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:13:53.637901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:13:53.640023Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:13:53.640065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:13:53.640249Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:53.640284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:13:53.640612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:53.640656Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:13:53.640762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:13:53.640801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:53.640834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:13:53.640874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:53.640925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:13:53.640965Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:53.640999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:13:53.641024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:13:53.641109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:13:53.641146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:13:53.641171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:13:53.642961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:13:53.643058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:13:53.643089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... shard: 72057594046678944, txId: 103, path id: 1 2025-04-17T17:14:00.565078Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-17T17:14:00.565145Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:14:00.565189Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 103:0 ProgressState 2025-04-17T17:14:00.565234Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 135 -> 240 2025-04-17T17:14:00.566275Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:14:00.566349Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:14:00.566373Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:14:00.566399Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T17:14:00.566426Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:14:00.567620Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:14:00.567718Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:14:00.567741Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:14:00.567765Z node 
7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T17:14:00.567792Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-17T17:14:00.567851Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-17T17:14:00.573803Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72075186233409546 at ss 72057594046678944 2025-04-17T17:14:00.573866Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72075186233409546 at ss 72057594046678944 2025-04-17T17:14:00.573891Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72075186233409546 at ss 72057594046678944 2025-04-17T17:14:00.573916Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72075186233409546 at ss 72057594046678944 2025-04-17T17:14:00.574110Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:14:00.574152Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-17T17:14:00.574283Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:14:00.574328Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:14:00.574383Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:14:00.574422Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:14:00.574460Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-17T17:14:00.574505Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:14:00.574551Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T17:14:00.574588Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T17:14:00.574779Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-17T17:14:00.575441Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:14:00.576888Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-17T17:14:00.577093Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186234409547 2025-04-17T17:14:00.577275Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T17:14:00.577592Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186234409547 2025-04-17T17:14:00.579022Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, 
at schemeshard: 72057594046678944 2025-04-17T17:14:00.579227Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:14:00.579783Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186234409546 2025-04-17T17:14:00.579992Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:14:00.580448Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186234409548 2025-04-17T17:14:00.581397Z node 7 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Forgetting tablet 72075186234409546 2025-04-17T17:14:00.584960Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T17:14:00.585185Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186234409548 2025-04-17T17:14:00.591966Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-17T17:14:00.592258Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:14:00.593337Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:14:00.593403Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:14:00.593541Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:14:00.594225Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:14:00.594290Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:14:00.594372Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:14:00.596854Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T17:14:00.596919Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-17T17:14:00.597023Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T17:14:00.597045Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2025-04-17T17:14:00.599139Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T17:14:00.599177Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2025-04-17T17:14:00.599250Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 
72057594046678944:4 2025-04-17T17:14:00.599289Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2025-04-17T17:14:00.599856Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T17:14:00.600978Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-17T17:14:00.601245Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T17:14:00.601296Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T17:14:00.601806Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T17:14:00.601945Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:14:00.601997Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:584:2525] TestWaitNotification: OK eventTxId 103 2025-04-17T17:14:00.602597Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:14:00.602798Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 251us result status StatusPathDoesNotExist 2025-04-17T17:14:00.602964Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> TConfigsCacheTests::TestConfigurationSaveOnNotification [GOOD]
>> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification
>> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1
>> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false [GOOD]
>> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true
>> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198 [GOOD]
>> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900
>> KqpScanArrowFormat::AllTypesColumns [GOOD]
>> KqpScanArrowFormat::AllTypesColumnsCellvec
>> KqpScanArrowFormat::SingleKey [GOOD]
>> KqpScanArrowFormat::JoinWithParams
>> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false [GOOD]
>> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true
>> TCdcStreamTests::Negative [GOOD]
>> TCdcStreamTests::DisableProtoSourceIdInfo
>> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice [GOOD]
>> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex
>> KqpScanArrowInChanels::AllTypesColumns [GOOD]
>> KqpScanArrowInChanels::SingleKey
>> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD]
>> SystemView::ShowCreateTableReadReplicas [GOOD]
>> SystemView::ShowCreateTableTtlSettings
>> SystemView::CollectScanQueries [GOOD]
>> SystemView::CollectScriptingQueries
>> KqpTypes::QuerySpecialTypes
>> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification [GOOD]
>> TConfigsCacheTests::TestConfigurationChangeSensor
>> TConsoleTests::TestCreateTenant [GOOD]
>> TConsoleTests::TestCreateTenantExtSubdomain
>> TopicAutoscaling::PartitionSplit_DistributedTxCommit [GOOD]
>> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_NotSplitedTopic
>> SystemView::AuthUsers_ResultOrder [GOOD]
>> SystemView::AuthUsers_TableRange
>> TCdcStreamTests::DisableProtoSourceIdInfo [GOOD]
>> TCdcStreamTests::CreateStream
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:13:54.483535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:13:54.483644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:13:54.483678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:13:54.483713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:13:54.483754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:13:54.483779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:13:54.483830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:13:54.483900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:13:54.484271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:54.570232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:13:54.570308Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:54.584003Z node 1
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:54.584147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:13:54.584300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:13:54.589903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:13:54.590173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:13:54.590856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:54.591115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:13:54.594784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:54.596014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:13:54.596070Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:54.596202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:13:54.596264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:13:54.596318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:13:54.596516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:13:54.603076Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:13:54.747687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:13:54.747891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:54.748070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:13:54.748340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:13:54.748396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:54.750342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:54.750465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:13:54.750606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-17T17:13:54.750665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:13:54.750715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:13:54.750745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:13:54.752536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:54.752591Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:13:54.752625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:13:54.754188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:54.754236Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:54.754272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:54.754338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:13:54.757787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:13:54.759762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:13:54.759933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:13:54.760868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:54.760996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:13:54.761063Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:54.761334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:13:54.761384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:54.761535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:13:54.761608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-04-17T17:13:54.763737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:13:54.763779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:13:54.763948Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:54.763992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:13:54.764281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:54.764328Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:13:54.764419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:13:54.764452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:54.764492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:13:54.764522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:54.764565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:13:54.764635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:54.764687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:13:54.764719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:13:54.764777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:13:54.764813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:13:54.764847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:13:54.766632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:13:54.766734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:13:54.766767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
D INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T17:14:02.800294Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:14:02.800384Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-17T17:14:02.802254Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-17T17:14:02.802404Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-17T17:14:02.802966Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:14:02.803255Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:14:02.803380Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 30064773229 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:14:02.803443Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 102:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-17T17:14:02.803537Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 102 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:14:02.803583Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:14:02.803636Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 134 2025-04-17T17:14:02.806129Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:14:02.806685Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:14:02.806753Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:14:02.806878Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 134 -> 135 2025-04-17T17:14:02.807152Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:14:02.807222Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T17:14:02.808815Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:14:02.808856Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, 
txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:14:02.808989Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:14:02.809151Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:14:02.809183Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-17T17:14:02.809219Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T17:14:02.809272Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:14:02.809315Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 102:0 ProgressState 2025-04-17T17:14:02.809381Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 135 -> 240 2025-04-17T17:14:02.810586Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:14:02.810669Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:14:02.810696Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:14:02.810726Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T17:14:02.810758Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:14:02.811658Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:14:02.811739Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:14:02.811765Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:14:02.811794Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T17:14:02.811823Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:14:02.811889Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-17T17:14:02.813542Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:14:02.813608Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:14:02.813776Z node 7 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:14:02.813826Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:14:02.813877Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:14:02.813927Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:14:02.813979Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T17:14:02.814032Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:14:02.814079Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:14:02.814124Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:14:02.814195Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:14:02.814551Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:14:02.814618Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:14:02.814687Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:14:02.815057Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:14:02.815112Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:14:02.815188Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:14:02.816325Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:14:02.816775Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:14:02.818711Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T17:14:02.818788Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T17:14:02.819032Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T17:14:02.819087Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T17:14:02.819544Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T17:14:02.819640Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:14:02.819703Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: 
satisfy waiter [7:344:2335] TestWaitNotification: OK eventTxId 102 2025-04-17T17:14:02.820248Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:14:02.820444Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 230us result status StatusPathDoesNotExist 2025-04-17T17:14:02.820616Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpScanArrowInChanels::AggregateNoColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false >> DataShardReadIterator::ShouldReadMultipleKeys [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeysOneByOne >> TConfigsCacheTests::TestConfigurationChangeSensor [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotification >> KqpQuery::RewriteIfPresentToMap [GOOD] >> KqpQuery::RowsLimit >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite >> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::ControlPlane_CreateAlterDescribe >> TConfigsDispatcherTests::TestSubscriptionNotification [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate >> KqpQuery::UdfTerminate >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate >> Cdc::NewAndOldImagesLog[YdsRunner] [GOOD] >> Cdc::NewAndOldImagesLog[TopicRunner] >> SystemView::TabletsShards [GOOD] >> SystemView::TabletsFollowers >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscription >> TCdcStreamTests::CreateStream [GOOD] >> TCdcStreamTests::AlterStream >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true >> KqpStats::DataQueryWithEffects+UseSink [GOOD] >> KqpStats::DataQueryWithEffects-UseSink >> TConfigsDispatcherTests::TestRemoveSubscription [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess >> SystemView::AuthOwners_Access [GOOD] >> 
SystemView::AuthOwners_ResultOrder >> KqpScanArrowFormat::AllTypesColumnsCellvec [GOOD] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps >> KqpScanArrowInChanels::SingleKey [GOOD] >> KqpScanArrowInChanels::JoinWithParams >> KqpScanArrowFormat::JoinWithParams [GOOD] >> KqpScanArrowInChanels::AggregateCountStar >> DataShardReadIterator::ShouldReadFromFollower [GOOD] >> DataShardReadIterator::ShouldReadHeadFromFollower >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess [GOOD] >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 >> TConsoleTests::TestCreateTenantExtSubdomain [GOOD] >> TConsoleTests::TestCreateSharedTenant >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification [GOOD] >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist >> TCdcStreamTests::AlterStream [GOOD] >> TCdcStreamTests::DropStream >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] >> DataShardReadIteratorConsistency::LeaseConfirmationNotOutOfOrder [GOOD] >> DataShardReadIteratorFastCancel::ShouldProcessFastCancel >> SystemView::AuthGroupMembers_Access [GOOD] >> SystemView::AuthGroupMembers_ResultOrder >> KqpTypes::QuerySpecialTypes [GOOD] >> KqpTypes::SelectNull >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist [GOOD] >> TConfigsDispatcherTests::TestYamlEndToEnd ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:13:54.596486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:13:54.596626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:13:54.596663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:13:54.596695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:13:54.596748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:13:54.596775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:13:54.596825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:13:54.596894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-04-17T17:13:54.597173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:54.681060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:13:54.681114Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:54.700771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:54.701133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:13:54.701274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:13:54.708474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:13:54.708632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:13:54.709192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:54.709386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:13:54.711821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:54.712952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:13:54.713005Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:54.713159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:13:54.713200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:13:54.713234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:13:54.713345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:13:54.719041Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:13:54.841693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:13:54.841925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:54.842116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:13:54.842330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:13:54.842386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:54.844616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:54.844723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:13:54.844910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:54.844964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:13:54.845004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:13:54.845032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:13:54.846798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:54.846853Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:13:54.846895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:13:54.848488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:54.848577Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:54.848668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:54.848754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:13:54.852171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:13:54.853782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:13:54.853955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:13:54.854820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:13:54.854973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:13:54.855028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:54.855249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:13:54.855305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:13:54.855451Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:13:54.855553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:13:54.857306Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:13:54.857344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:13:54.857511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:13:54.857563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:13:54.857906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:13:54.857948Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:13:54.858091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:13:54.858127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:54.858160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:13:54.858187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:54.858219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:13:54.858256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:13:54.858289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:13:54.858313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:13:54.858367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:13:54.858398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:13:54.858439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:13:54.860175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:13:54.860291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:13:54.860327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 2 2025-04-17T17:14:09.279661Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 116:0 1 -> 2 2025-04-17T17:14:09.280291Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 116:1, propose status:StatusAccepted, reason: , at schemeshard: 72075186233409546 2025-04-17T17:14:09.280358Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 116:0, at schemeshard: 72075186233409546 2025-04-17T17:14:09.280496Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 12 2025-04-17T17:14:09.280565Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 3 2025-04-17T17:14:09.283176Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 116, response: Status: StatusAccepted TxId: 116 SchemeshardId: 72075186233409546 PathId: 9, at schemeshard: 72075186233409546 2025-04-17T17:14:09.283388Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 116, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2025-04-17T17:14:09.283756Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-17T17:14:09.283814Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 116, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-17T17:14:09.284041Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 116, path id: [OwnerId: 72075186233409546, LocalPathId: 9] 2025-04-17T17:14:09.284170Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-17T17:14:09.284227Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:729:2631], at schemeshard: 72075186233409546, txId: 116, path id: 1 2025-04-17T17:14:09.284275Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:729:2631], at schemeshard: 72075186233409546, txId: 116, path id: 9 2025-04-17T17:14:09.284725Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-04-17T17:14:09.284798Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 116:0 ProgressState, operation type: TxCreateTable, at tablet# 72075186233409546 2025-04-17T17:14:09.285058Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 116:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 9 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-04-17T17:14:09.285859Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 16 PathOwnerId: 72075186233409546, cookie: 116 2025-04-17T17:14:09.285968Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 16 PathOwnerId: 72075186233409546, cookie: 116 2025-04-17T17:14:09.286021Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-04-17T17:14:09.286096Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 16 2025-04-17T17:14:09.286153Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 13 2025-04-17T17:14:09.288037Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 9 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-04-17T17:14:09.288118Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 9 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-04-17T17:14:09.288139Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-04-17T17:14:09.288165Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 9], version: 1 2025-04-17T17:14:09.288189Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 4 2025-04-17T17:14:09.288263Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 116, ready parts: 0/1, is published: true 2025-04-17T17:14:09.290143Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72057594037968897 cookie: 72075186233409546:11 msg type: 268697601 2025-04-17T17:14:09.290306Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72057594037968897 2025-04-17T17:14:09.290396Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-04-17T17:14:09.290833Z node 7 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 9 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-04-17T17:14:09.291086Z node 7 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72075186233409546, OwnerIdx 11, type DataShard, boot OK, tablet id 72075186233409556 2025-04-17T17:14:09.291268Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72075186233409546 message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-04-17T17:14:09.291326Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-04-17T17:14:09.291508Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 116:0, at schemeshard: 72075186233409546, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 
72075186233409556 Origin: 72057594037968897 2025-04-17T17:14:09.291593Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, at tabletId: 72075186233409546 2025-04-17T17:14:09.291702Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-04-17T17:14:09.291815Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 116:0 2 -> 3 2025-04-17T17:14:09.293139Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2025-04-17T17:14:09.295260Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2025-04-17T17:14:09.296718Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 116:0, at schemeshard: 72075186233409546 2025-04-17T17:14:09.297112Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-04-17T17:14:09.297182Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId# 116:0 ProgressState at tabletId# 72075186233409546 2025-04-17T17:14:09.297292Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 seqNo: 3:8 2025-04-17T17:14:09.297722Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 678 RawX2: 30064773663 } TxBody: "\n\236\004\n\007Table11\020\t\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\n\000\220\000\000\020\000\001\020\t:\004\010\003\020\010" TxId: 116 ExecLevel: 0 Flags: 0 SchemeShardId: 72075186233409546 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } SubDomainPathId: 1 2025-04-17T17:14:09.301099Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72075186233409556 cookie: 72075186233409546:11 msg type: 269549568 
2025-04-17T17:14:09.301286Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72075186233409556 TestModificationResult got TxId: 116, wait until txId: 116 TestModificationResults wait txId: 117 2025-04-17T17:14:09.336866Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table12" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key" } } TxId: 117 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2025-04-17T17:14:09.345437Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 117, response: Status: StatusQuotaExceeded Reason: "Request exceeded a limit on the number of schema operations, try again later." TxId: 117 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-04-17T17:14:09.345687Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 117, database: /MyRoot/USER_0, subject: , status: StatusQuotaExceeded, reason: Request exceeded a limit on the number of schema operations, try again later., operation: CREATE TABLE, path: /MyRoot/USER_0/Table12 TestModificationResult got TxId: 117, wait until txId: 117 >> SystemView::CollectScriptingQueries [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeysOneByOne [GOOD] >> DataShardReadIterator::ShouldReadNotExistingRange >> KqpQuery::RowsLimit [GOOD] >> KqpQuery::ReadOverloaded-StreamLookup >> TConfigsDispatcherTests::TestYamlEndToEnd [GOOD] >> TConsoleConfigHelpersTests::TestConfigCourier >> Cdc::NewAndOldImagesLog[TopicRunner] [GOOD] >> Cdc::NewAndOldImagesLogDebezium >> TopicAutoscaling::ControlPlane_CDC_Enable [GOOD] >> TopicAutoscaling::ControlPlane_CDC_Disable >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowInChanels::AggregateWithFunction >> KqpQuery::UdfTerminate [GOOD] >> KqpQuery::UdfMemoryLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::CollectScriptingQueries [GOOD] Test command err: 2025-04-17T17:11:24.119371Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494330887871102410:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:24.119415Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/000b8f/r3tmp/tmp4z0XzX/pdisk_1.dat 2025-04-17T17:11:25.111337Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:25.127705Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 4979, node 1 2025-04-17T17:11:25.213772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:25.213876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:25.332625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:11:25.376605Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:25.376632Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:25.376639Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:25.376743Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11283 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:11:28.680331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:28.741638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:28.797274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:11:28.868389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:28.868507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:28.872080Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-17T17:11:28.883675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:28.883777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:28.886283Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-17T17:11:28.920657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:11:28.920878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:11:29.115719Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494330887871102410:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:29.118309Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:11:29.366165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:29.416862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:29.416969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:29.448571Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-17T17:11:29.432211Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494330908396191265:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:29.444312Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:11:29.473964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T17:11:29.480825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-04-17T17:11:29.929871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:29.929950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:29.944746Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:11:30.061611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:11:34.423209Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494330908396191265:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:34.423270Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:11:39.597800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:11:40.125213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:11:40.125245Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:41.502900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330960885547924:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:41.503009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:41.503437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330960885547936:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:41.507650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480 2025-04-17T17:11:41.841608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494330960885547938:2367], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-17T17:11:41.977544Z node 1 :TX_PROXY ERROR: Actor# [1:7494330960885548016:3148] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:11:46.650119Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01js2br7nscf5g5safjt9pht8t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDM2ZTI4ZjUtMWRkNjlkMDAtNDY5NDU0M2EtZDNjYzc1OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:11:47.742059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:11:50.269576Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01js2brenp0j7505ftf22fjewg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDM2ZTI4ZjUtMWRkNjlkMDAtNDY5NDU0M2EtZDNjYzc1OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:11:51.795856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:11:56.520540Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01js2brp4y837dnzakcqh2mg7e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDM2ZTI4ZjUtMWRkNjlkMDAtNDY5NDU0M2EtZDNjYzc1OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:11:58.224840Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01js2brph2f4zrt8a17rvapwwk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2ViZjFmNDEtNWE2MDJmN2YtNTFmMTAxOGYtY2U5NmJjNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:11:58.227433Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7494331033899992632:2432], owner: [1:7494331033899992629:2430], scan id: 0, table id: [72057594046644480:2:0:nodes] 2025-04-17T17:11:58.236119Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7494331033899992632:2432], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root/Tenant1, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], database node count: 2 2025-04-17T17:11:58.241881Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7494331033899992632:2432], row count: 2, finished: 1 2025-04-17T17:11:58.241937Z node 1 :SYSTEM_VIEWS INFO ... 
72057594046644480 2025-04-17T17:14:01.287805Z node 31 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[31:7494331539665335134:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:01.287896Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:14:01.497114Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [31:7494331561140172344:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:01.497115Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [31:7494331561140172334:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:01.497215Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:01.501755Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T17:14:01.514515Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [31:7494331561140172348:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:14:01.605272Z node 31 :TX_PROXY ERROR: Actor# [31:7494331561140172400:2399] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:01.711123Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2bwgcp9p2fgb8redc0y87b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=NDQzNzgwNGEtNWZiZjljYzEtNDZlYjVmMjQtOTg3NGJjZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:14:01.842725Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01js2bwgm02q2g4mh4gkeacxyf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=Yzk1NWM1MzYtODYzNGU0Y2MtMzA3MjU4YTUtNGVhZjVh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:14:01.904166Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910041882, txId: 281474976710662] shutting down 2025-04-17T17:14:02.166221Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01js2bwgtc89wb1pav7p602fyq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=ZjAxZDQ5NjgtYjYxMDZiNTYtNmFiZjc2ZjYtMTU3OGMwMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:14:02.168501Z node 31 :SYSTEM_VIEWS INFO: Scan started, actor: [31:7494331565435139815:2366], owner: [31:7494331565435139811:2364], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-17T17:14:02.172291Z node 31 :SYSTEM_VIEWS INFO: Scan prepared, actor: [31:7494331565435139815:2366], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:14:02.173328Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7494331565435139815:2366], row count: 2, finished: 1 2025-04-17T17:14:02.173398Z node 31 :SYSTEM_VIEWS INFO: Scan finished, actor: [31:7494331565435139815:2366], owner: [31:7494331565435139811:2364], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-17T17:14:02.179956Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910042165, txId: 281474976710664] shutting down 2025-04-17T17:14:03.387131Z node 32 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[32:7494331571175105618:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:03.387302Z node 32 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/000b8f/r3tmp/tmpNj87Lg/pdisk_1.dat 2025-04-17T17:14:03.539847Z node 32 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:03.623464Z node 32 :HIVE WARN: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:03.623572Z node 32 :HIVE WARN: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-04-17T17:14:03.625482Z node 32 :HIVE WARN: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28384, node 32 2025-04-17T17:14:03.702292Z node 32 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:03.702322Z node 32 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:03.702332Z node 32 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:03.702480Z node 32 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9216 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T17:14:04.088742Z node 32 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:14:04.114904Z node 32 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:08.387829Z node 32 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[32:7494331571175105618:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:08.387935Z node 32 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:14:08.681564Z node 32 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [32:7494331592649942849:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:08.681668Z node 32 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:08.681784Z node 32 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [32:7494331592649942857:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:08.686362Z node 32 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:14:08.702466Z node 32 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [32:7494331592649942863:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:14:08.806188Z node 32 :TX_PROXY ERROR: Actor# [32:7494331592649942914:2400] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:08.979030Z node 32 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2bwqd6a7bd50e8gsvz9q96, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=32&id=Y2IxYjE5ZTctN2FlMmJjYmEtZDc5ODM4MzMtNTU3ODZmNGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:14:09.210837Z node 32 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2bwqskfgabmxx7wgj6e6g6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=32&id=NGMyNjQwZmQtZWM0Yzc2ZTEtNTcxOWIzM2EtNjc2NDBmNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:14:09.231867Z node 32 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910049246, txId: 281474976715662] shutting down 2025-04-17T17:14:09.577871Z node 32 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2bwr06323jgm1dv9be7kge, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=32&id=NWRhNGMwNzQtNjQ5NThjZC00ZWFlOTk4ZC1hYzY4Y2I4Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:14:09.582388Z node 32 :SYSTEM_VIEWS INFO: Scan started, actor: [32:7494331596944910341:2372], owner: [32:7494331596944910338:2370], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-17T17:14:09.589859Z node 32 :SYSTEM_VIEWS INFO: Scan prepared, actor: [32:7494331596944910341:2372], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:14:09.590541Z node 32 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [32:7494331596944910341:2372], row count: 2, finished: 1 2025-04-17T17:14:09.590594Z node 32 :SYSTEM_VIEWS INFO: Scan finished, actor: [32:7494331596944910341:2372], owner: [32:7494331596944910338:2370], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-17T17:14:09.594586Z node 32 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910049576, txId: 281474976715664] shutting down >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite >> KqpQuery::CurrentUtcTimestamp >> TConsoleConfigHelpersTests::TestConfigCourier [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriber >> Balancing::Balancing_ManyTopics_PQv1 [GOOD] >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition |76.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |76.0%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw >> SystemView::TabletsFollowers [GOOD] >> SystemView::TabletsRanges >> KqpStats::DataQueryWithEffects-UseSink [GOOD] >> 
KqpStats::DataQueryMulti >> TConsoleTests::TestCreateSharedTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenant >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite >> KqpParams::RowsList >> KqpScanArrowInChanels::JoinWithParams [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK >> TConsoleConfigHelpersTests::TestConfigSubscriber [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant >> DataShardReadIterator::ShouldReadHeadFromFollower [GOOD] >> DataShardReadIterator::ShouldReadFromHead >> TCdcStreamTests::DropStream [GOOD] >> TCdcStreamTests::AlterStreamImplShouldFail >> KqpScanArrowInChanels::AggregateCountStar [GOOD] >> KqpScanArrowInChanels::AggregateByColumn >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000 >> KqpTypes::SelectNull [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::JoinWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 22853, MsgBus: 2071 2025-04-17T17:13:55.845238Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331535906685494:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:55.845662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001ee5/r3tmp/tmpLFcPeM/pdisk_1.dat 2025-04-17T17:13:56.631031Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:56.651509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:56.652033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:56.657693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22853, node 1 2025-04-17T17:13:56.872308Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:13:56.872326Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:13:56.872332Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:13:56.872424Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2071 TClient is connected to server localhost:2071 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:13:57.726336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:57.746904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:57.979162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:58.170217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:58.256866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:59.962867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331553086556312:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:59.962995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:00.276919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:00.328926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:00.377054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:00.443585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:00.494095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:00.556101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:00.597135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:00.660492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331557381524271:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:00.660594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:00.660867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331557381524276:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:00.664964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:00.678125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331557381524278:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:14:00.774566Z node 1 :TX_PROXY ERROR: Actor# [1:7494331557381524334:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:00.840248Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331535906685494:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:00.840327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:14:01.718096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:14:02.073578Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910042099, txId: 281474976710674] shutting down 864000000000 Trying to start YDB, gRPC: 13923, MsgBus: 4213 2025-04-17T17:14:02.851701Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494331567200815747:2214];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:02.879685Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001ee5/r3tmp/tmp1fP6ZW/pdisk_1.dat 2025-04-17T17:14:02.973509Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13923, node 2 2025-04-17T17:14:02.992957Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:02.993024Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:02.999863Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:14:03.084875Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:03.084895Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:03.084935Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:03.085034Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4213 TClient is connected to server localhost:4213 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:03.548598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:03.554292Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:14:03.568972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiti ... 05.904309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:05.990383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:06.050154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:06.121415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:06.286366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:06.353252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:06.471717Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494331584380687193:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:06.471801Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:06.472242Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494331584380687198:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:06.475328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:06.488679Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494331584380687200:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:14:06.561689Z node 2 :TX_PROXY ERROR: Actor# [2:7494331584380687256:3549] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:07.544037Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910047580, txId: 281474976715672] shutting down Trying to start YDB, gRPC: 29332, MsgBus: 29317 2025-04-17T17:14:08.360353Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494331592174628357:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:08.360417Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001ee5/r3tmp/tmpyRwW3K/pdisk_1.dat 2025-04-17T17:14:08.480339Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:08.515706Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:08.515785Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 29332, node 3 2025-04-17T17:14:08.517488Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:14:08.574653Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:08.574679Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:08.574686Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:08.574816Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29317 TClient is connected to server localhost:29317 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:09.055771Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:14:09.065720Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:14:09.074436Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:09.142386Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:09.317915Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:09.416985Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:11.574849Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331605059532003:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:11.574942Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:11.635086Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:11.705992Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:11.764569Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:11.808074Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:11.845446Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:11.919587Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:12.002558Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:12.085371Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331609354499973:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:12.085473Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:12.085907Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331609354499978:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:12.089499Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:12.106194Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494331609354499980:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:14:12.213520Z node 3 :TX_PROXY ERROR: Actor# [3:7494331609354500035:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:13.365254Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494331592174628357:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:13.365329Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:14:13.636398Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910053670, txId: 281474976715672] shutting down 2025-04-17T17:14:13.835881Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910053873, txId: 281474976715674] shutting down >> TopicAutoscaling::ControlPlane_CreateAlterDescribe [GOOD] >> TopicAutoscaling::ControlPlane_DisableAutoPartitioning >> KqpScanArrowFormat::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowFormat::AggregateWithFunction >> WithSDK::CommitWithWrongSessionId [GOOD] >> WithSDK::CommitToPastWithWrongSessionId >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants >> TCdcStreamTests::AlterStreamImplShouldFail [GOOD] >> TCdcStreamTests::DropStreamImplShouldFail >> DataShardReadIteratorFastCancel::ShouldProcessFastCancel [GOOD] >> DataShardReadIteratorLatency::ReadSplitLatency >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD] >> Cdc::NewAndOldImagesLogDebezium [GOOD] >> Cdc::OldImageLogDebezium >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 >> SystemView::AuthUsers_TableRange [GOOD] >> SystemView::AuthPermissions_ResultOrder >> KqpQuery::QueryResultsTruncated >> DataShardReadIterator::ShouldReadNotExistingRange [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk1_100 >> KqpQuery::UdfMemoryLimit [GOOD] >> KqpQuery::TryToUpdateNonExistentColumn >> KqpScanArrowInChanels::AggregateWithFunction [GOOD] >> KqpScanArrowInChanels::AggregateEmptySum >> TCdcStreamTests::DropStreamImplShouldFail [GOOD] >> TCdcStreamTests::CopyTableShouldNotCopyStream >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain >> KqpQuery::CurrentUtcTimestamp [GOOD] >> KqpQuery::DdlInDataQuery >> KqpStats::DataQueryMulti [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite >> TConsoleTests::TestCreateServerlessTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb >> KqpTypes::Time64Columns+EnableTableDatetime64 [GOOD] >> SystemView::AuthOwners_ResultOrder [GOOD] >> SystemView::AuthOwners_TableRange >> KqpParams::RowsList [GOOD] >> KqpParams::MissingParameter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD] Test command err: Leader for 
TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! 
!Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! 
new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! 
new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:85:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:87:2116] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:91:2057] recipient: [11:87:2116] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:90:2117] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:110:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2 ... 2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [106:54:2057] recipient: [106:51:2095] Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:57:2057] recipient: [106:51:2095] Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:74:2057] recipient: [106:14:2061] !Reboot 72057594037927937 (actor [106:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:127:2057] recipient: [106:36:2083] Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:130:2057] recipient: [106:14:2061] Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:131:2057] recipient: [106:129:2148] Leader for TabletID 72057594037927937 is [106:132:2149] sender: [106:133:2057] recipient: [106:129:2148] !Reboot 72057594037927937 (actor [106:56:2097]) rebooted! !Reboot 72057594037927937 (actor [106:56:2097]) tablet resolver refreshed! new actor is[106:132:2149] Leader for TabletID 72057594037927937 is [106:132:2149] sender: [106:152:2057] recipient: [106:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [107:54:2057] recipient: [107:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [107:54:2057] recipient: [107:50:2095] Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:57:2057] recipient: [107:50:2095] Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:74:2057] recipient: [107:14:2061] !Reboot 72057594037927937 (actor [107:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:130:2057] recipient: [107:36:2083] Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:133:2057] recipient: [107:14:2061] Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:134:2057] recipient: [107:132:2151] Leader for TabletID 72057594037927937 is [107:135:2152] sender: [107:136:2057] recipient: [107:132:2151] !Reboot 72057594037927937 (actor [107:56:2097]) rebooted! !Reboot 72057594037927937 (actor [107:56:2097]) tablet resolver refreshed! new actor is[107:135:2152] Leader for TabletID 72057594037927937 is [107:135:2152] sender: [107:189:2057] recipient: [107:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [108:54:2057] recipient: [108:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [108:54:2057] recipient: [108:51:2095] Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:57:2057] recipient: [108:51:2095] Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:74:2057] recipient: [108:14:2061] !Reboot 72057594037927937 (actor [108:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:130:2057] recipient: [108:36:2083] Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:133:2057] recipient: [108:14:2061] Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:134:2057] recipient: [108:132:2151] Leader for TabletID 72057594037927937 is [108:135:2152] sender: [108:136:2057] recipient: [108:132:2151] !Reboot 72057594037927937 (actor [108:56:2097]) rebooted! !Reboot 72057594037927937 (actor [108:56:2097]) tablet resolver refreshed! new actor is[108:135:2152] Leader for TabletID 72057594037927937 is [108:135:2152] sender: [108:189:2057] recipient: [108:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [109:54:2057] recipient: [109:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [109:54:2057] recipient: [109:50:2095] Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:57:2057] recipient: [109:50:2095] Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:74:2057] recipient: [109:14:2061] !Reboot 72057594037927937 (actor [109:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:131:2057] recipient: [109:36:2083] Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:134:2057] recipient: [109:133:2151] Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:135:2057] recipient: [109:14:2061] Leader for TabletID 72057594037927937 is [109:136:2152] sender: [109:137:2057] recipient: [109:133:2151] !Reboot 72057594037927937 (actor [109:56:2097]) rebooted! !Reboot 72057594037927937 (actor [109:56:2097]) tablet resolver refreshed! new actor is[109:136:2152] Leader for TabletID 72057594037927937 is [109:136:2152] sender: [109:154:2057] recipient: [109:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [110:54:2057] recipient: [110:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [110:54:2057] recipient: [110:52:2095] Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:57:2057] recipient: [110:52:2095] Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:74:2057] recipient: [110:14:2061] !Reboot 72057594037927937 (actor [110:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:133:2057] recipient: [110:36:2083] Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:135:2057] recipient: [110:14:2061] Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:137:2057] recipient: [110:136:2153] Leader for TabletID 72057594037927937 is [110:138:2154] sender: [110:139:2057] recipient: [110:136:2153] !Reboot 72057594037927937 (actor [110:56:2097]) rebooted! !Reboot 72057594037927937 (actor [110:56:2097]) tablet resolver refreshed! new actor is[110:138:2154] Leader for TabletID 72057594037927937 is [110:138:2154] sender: [110:192:2057] recipient: [110:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [111:54:2057] recipient: [111:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [111:54:2057] recipient: [111:51:2095] Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:57:2057] recipient: [111:51:2095] Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:74:2057] recipient: [111:14:2061] !Reboot 72057594037927937 (actor [111:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:133:2057] recipient: [111:36:2083] Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:136:2057] recipient: [111:135:2153] Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:137:2057] recipient: [111:14:2061] Leader for TabletID 72057594037927937 is [111:138:2154] sender: [111:139:2057] recipient: [111:135:2153] !Reboot 72057594037927937 (actor [111:56:2097]) rebooted! !Reboot 72057594037927937 (actor [111:56:2097]) tablet resolver refreshed! new actor is[111:138:2154] Leader for TabletID 72057594037927937 is [111:138:2154] sender: [111:192:2057] recipient: [111:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [112:54:2057] recipient: [112:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [112:54:2057] recipient: [112:52:2095] Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:57:2057] recipient: [112:52:2095] Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:74:2057] recipient: [112:14:2061] !Reboot 72057594037927937 (actor [112:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:134:2057] recipient: [112:36:2083] Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:136:2057] recipient: [112:14:2061] Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:138:2057] recipient: [112:137:2153] Leader for TabletID 72057594037927937 is [112:139:2154] sender: [112:140:2057] recipient: [112:137:2153] !Reboot 72057594037927937 (actor [112:56:2097]) rebooted! !Reboot 72057594037927937 (actor [112:56:2097]) tablet resolver refreshed! new actor is[112:139:2154] Leader for TabletID 72057594037927937 is [112:139:2154] sender: [112:193:2057] recipient: [112:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [113:54:2057] recipient: [113:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [113:54:2057] recipient: [113:51:2095] Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:57:2057] recipient: [113:51:2095] Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:74:2057] recipient: [113:14:2061] !Reboot 72057594037927937 (actor [113:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:137:2057] recipient: [113:36:2083] Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:140:2057] recipient: [113:14:2061] Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:141:2057] recipient: [113:139:2156] Leader for TabletID 72057594037927937 is [113:142:2157] sender: [113:143:2057] recipient: [113:139:2156] !Reboot 72057594037927937 (actor [113:56:2097]) rebooted! !Reboot 72057594037927937 (actor [113:56:2097]) tablet resolver refreshed! new actor is[113:142:2157] Leader for TabletID 72057594037927937 is [113:142:2157] sender: [113:196:2057] recipient: [113:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [114:54:2057] recipient: [114:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [114:54:2057] recipient: [114:50:2095] Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:57:2057] recipient: [114:50:2095] Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:74:2057] recipient: [114:14:2061] !Reboot 72057594037927937 (actor [114:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:137:2057] recipient: [114:36:2083] Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:140:2057] recipient: [114:139:2156] Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:141:2057] recipient: [114:14:2061] Leader for TabletID 72057594037927937 is [114:142:2157] sender: [114:143:2057] recipient: [114:139:2156] !Reboot 72057594037927937 (actor [114:56:2097]) rebooted! !Reboot 72057594037927937 (actor [114:56:2097]) tablet resolver refreshed! new actor is[114:142:2157] Leader for TabletID 72057594037927937 is [114:142:2157] sender: [114:196:2057] recipient: [114:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [115:54:2057] recipient: [115:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [115:54:2057] recipient: [115:51:2095] Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:57:2057] recipient: [115:51:2095] Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:74:2057] recipient: [115:14:2061] !Reboot 72057594037927937 (actor [115:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:138:2057] recipient: [115:36:2083] Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:141:2057] recipient: [115:14:2061] Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:142:2057] recipient: [115:140:2156] Leader for TabletID 72057594037927937 is [115:143:2157] sender: [115:144:2057] recipient: [115:140:2156] !Reboot 72057594037927937 (actor [115:56:2097]) rebooted! !Reboot 72057594037927937 (actor [115:56:2097]) tablet resolver refreshed! 
new actor is[115:143:2157] Leader for TabletID 72057594037927937 is [0:0:0] sender: [116:54:2057] recipient: [116:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [116:54:2057] recipient: [116:51:2095] Leader for TabletID 72057594037927937 is [116:56:2097] sender: [116:57:2057] recipient: [116:51:2095] Leader for TabletID 72057594037927937 is [116:56:2097] sender: [116:74:2057] recipient: [116:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::DataQueryMulti [GOOD] Test command err: Trying to start YDB, gRPC: 14735, MsgBus: 29216 2025-04-17T17:14:01.582925Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331563972197415:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:01.583008Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002469/r3tmp/tmpBUgo7j/pdisk_1.dat 2025-04-17T17:14:01.933797Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14735, node 1 2025-04-17T17:14:02.009203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:02.009315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:02.017410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:14:02.072561Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:02.072580Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:02.072588Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:02.072686Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29216 TClient is connected to server localhost:29216 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:02.622526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:14:02.641578Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:14:02.653517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:02.795384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:02.962295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:03.049579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:04.689160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331576857101087:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:04.689319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:04.961370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:04.992859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:05.031686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:05.061975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:05.128384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:05.161684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:05.206058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:05.295082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331581152069051:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:05.295189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:05.295494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331581152069056:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:05.299461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:05.312696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331581152069058:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:14:05.415517Z node 1 :TX_PROXY ERROR: Actor# [1:7494331581152069116:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:06.583782Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331563972197415:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:06.583842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23289, MsgBus: 13756 2025-04-17T17:14:07.425697Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494331586613687728:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:07.425768Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002469/r3tmp/tmp5tKFmm/pdisk_1.dat 2025-04-17T17:14:07.564085Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:07.594669Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:07.594741Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:07.598614Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23289, node 2 2025-04-17T17:14:07.696548Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:07.696567Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:07.696573Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:07.696670Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13756 TClient is connected to server localhost:13756 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-04-17T17:14:08.209952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:08.253613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:08.329948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:08.475897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, o ... is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:11.062282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:11.121358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:11.180490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:11.205918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:11.275312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:11.317601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:11.372054Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494331603793559361:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:11.372145Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:11.372493Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494331603793559366:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:11.376377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:11.388266Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494331603793559368:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:14:11.485263Z node 2 :TX_PROXY ERROR: Actor# [2:7494331603793559427:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:12.428220Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494331586613687728:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:12.428296Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12993, MsgBus: 9429 2025-04-17T17:14:13.802100Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494331615025515737:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:13.802639Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002469/r3tmp/tmpNDUmjx/pdisk_1.dat 2025-04-17T17:14:13.986252Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:14.021573Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:14.021666Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:14.029191Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12993, node 3 2025-04-17T17:14:14.097480Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:14.097508Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:14.097519Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:14.097646Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9429 TClient is connected to server localhost:9429 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-04-17T17:14:14.656316Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:14.661774Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:14:14.669922Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:14.733101Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:14.940202Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:15.036903Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:17.330831Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331632205386684:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:17.330903Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:17.392081Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:17.431567Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:17.467902Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:17.511007Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:17.544925Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:17.595295Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:17.673224Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:17.754477Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331632205387348:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:17.754562Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:17.754906Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331632205387353:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:17.758672Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:17.768548Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494331632205387355:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:14:17.832832Z node 3 :TX_PROXY ERROR: Actor# [3:7494331632205387410:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:18.802311Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494331615025515737:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:18.802379Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns+EnableTableDatetime64 [GOOD] Test command err: Trying to start YDB, gRPC: 26109, MsgBus: 32139 2025-04-17T17:14:03.624585Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331573183242859:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:03.644452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002468/r3tmp/tmpF12ILI/pdisk_1.dat 2025-04-17T17:14:04.315960Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:04.339598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:04.339712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:04.341337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26109, node 1 2025-04-17T17:14:04.401657Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:04.401680Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:04.401693Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:04.401806Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32139 TClient is connected to server localhost:32139 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:04.969267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:14:05.001195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:14:05.135550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:05.301087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:14:05.371028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T17:14:07.070814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331590363113792:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:07.070971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:07.417228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:07.452629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:07.533461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:07.566634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:07.638688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:07.687556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:07.760205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:07.846231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331590363114463:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:07.846311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:07.846333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331590363114468:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:07.850283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:07.872607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331590363114470:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:14:07.964069Z node 1 :TX_PROXY ERROR: Actor# [1:7494331590363114528:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:08.626203Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331573183242859:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:08.626268Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 31167, MsgBus: 30634 2025-04-17T17:14:10.133486Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494331602609141989:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:10.171756Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002468/r3tmp/tmpDy5MCO/pdisk_1.dat 2025-04-17T17:14:10.284478Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:10.301864Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:10.301943Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:10.308767Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31167, node 2 2025-04-17T17:14:10.395702Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:10.395723Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:10.395730Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:10.395837Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30634 TClient is connected to server localhost:30634 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-04-17T17:14:10.846977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:10.853227Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:14:10.859262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:10.933366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:11.069191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:14:11.141860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T17:14:13.514136Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494331615494045629:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:13.514207Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:13.575638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:13.622517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:13.663890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:13.745362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:13.779848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:13.857631Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:13.939308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:14.037109Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494331619789013602:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:14.037193Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:14.037748Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494331619789013607:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:14.041820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:14.060770Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494331619789013609:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:14:14.147310Z node 2 :TX_PROXY ERROR: Actor# [2:7494331619789013664:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:15.135327Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494331602609141989:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:15.135394Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5284, MsgBus: 8945 2025-04-17T17:14:16.261698Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494331625587262287:2135];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:16.261741Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002468/r3tmp/tmpPhIiQi/pdisk_1.dat 2025-04-17T17:14:16.536925Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5284, node 3 2025-04-17T17:14:16.600091Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:16.600134Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:16.600142Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:16.600257Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:14:16.633603Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:16.633678Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:16.635705Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8945 TClient is connected to server localhost:8945 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-04-17T17:14:17.053439Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:17.061875Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:14:19.464553Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331638472164756:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:19.464713Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:19.472691Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:14:19.558641Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331638472164858:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:19.558759Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:19.559335Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331638472164863:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:19.563236Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T17:14:19.582437Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494331638472164865:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:14:19.638391Z node 3 :TX_PROXY ERROR: Actor# [3:7494331638472164916:2395] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser >> DataShardReadIterator::ShouldReadFromHead [GOOD] >> DataShardReadIterator::ShouldReadFromHeadWithConflict+UseSink >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite >> TCdcStreamTests::CopyTableShouldNotCopyStream [GOOD] >> TCdcStreamTests::MoveTableShouldFail >> SystemView::TabletsRanges [GOOD] >> SystemView::TabletsRangesPredicateExtractDisabled >> KqpLimits::CancelAfterRwTx+useSink >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001 >> KqpStats::StreamLookupStats+StreamLookupJoin >> Cdc::OldImageLogDebezium [GOOD] >> Cdc::NewImageLogDebezium >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser [GOOD] >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates >> KqpScanArrowInChanels::AggregateByColumn [GOOD] >> SystemView::AuthGroupMembers_ResultOrder [GOOD] >> SystemView::AuthGroupMembers_TableRange >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb [GOOD] >> TConsoleTests::TestCreateTenantWrongName |76.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |76.0%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink >> TopicAutoscaling::ControlPlane_CDC_Disable [GOOD] >> TopicAutoscaling::ReadFromTimestamp_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadFromTimestamp_AutoscaleAwareSDK >> TCdcStreamTests::MoveTableShouldFail [GOOD] >> TCdcStreamTests::CheckSchemeLimits >> KqpScanArrowFormat::AggregateWithFunction [GOOD] >> DataShardReadIteratorLatency::ReadSplitLatency [GOOD] >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateByColumn [GOOD] Test command err: Trying to start YDB, gRPC: 32475, MsgBus: 20457 2025-04-17T17:13:55.965528Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331536156440982:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:55.965588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001ef7/r3tmp/tmpQTPAXi/pdisk_1.dat 2025-04-17T17:13:56.572897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:56.572986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:13:56.574317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:13:56.616048Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32475, node 1 2025-04-17T17:13:56.716007Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:13:56.716038Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:13:56.716048Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:13:56.716185Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20457 TClient is connected to server localhost:20457 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:13:57.360445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:57.386129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:57.541057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:57.790104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:13:57.868440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T17:13:59.510077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331553336311873:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:59.510182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:59.846706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:13:59.874513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:13:59.902222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:13:59.947307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:00.026103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:00.074661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:00.177569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:00.272631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331557631279841:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:00.272714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:00.273011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331557631279846:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:00.277760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:00.293491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331557631279848:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:14:00.364725Z node 1 :TX_PROXY ERROR: Actor# [1:7494331557631279903:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:00.967133Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331536156440982:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:00.967205Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:14:01.508440Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910041546, txId: 281474976710672] shutting down Trying to start YDB, gRPC: 30400, MsgBus: 28933 2025-04-17T17:14:02.320585Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494331567488572996:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:02.330639Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001ef7/r3tmp/tmpvmPRVV/pdisk_1.dat 2025-04-17T17:14:02.502259Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:02.509097Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:02.509186Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:02.517418Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30400, node 2 2025-04-17T17:14:02.580599Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:02.580622Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:02.580629Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:02.580739Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28933 TClient is connected to server localhost:28933 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:03.036033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:03.052963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:03.144011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:03.291823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchem ... T17:14:12.076209Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:12.114255Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:12.180018Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:12.226821Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:12.335698Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:12.470940Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331610234721576:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:12.471060Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:12.471896Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331610234721582:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:12.476871Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:12.495176Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T17:14:12.496245Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494331610234721584:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:14:12.568053Z node 3 :TX_PROXY ERROR: Actor# [3:7494331610234721641:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:13.582655Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494331593054850040:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:13.582742Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:14:14.836769Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910054181, txId: 281474976710672] shutting down Trying to start YDB, gRPC: 24667, MsgBus: 9665 2025-04-17T17:14:15.793992Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494331624121140808:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:15.802866Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001ef7/r3tmp/tmpXSBFnI/pdisk_1.dat 2025-04-17T17:14:15.939154Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:16.023069Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:16.023165Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:16.024627Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24667, node 4 2025-04-17T17:14:16.084715Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:16.084740Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:16.084747Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:16.084869Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9665 TClient is connected to server localhost:9665 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:16.626845Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:14:16.654596Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:14:16.767481Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:17.001533Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:17.088387Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:19.377410Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494331641301011765:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:19.377539Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:19.425866Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:19.468291Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:19.514699Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:19.564414Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:19.615128Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:19.700905Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:19.785801Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:19.892564Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494331641301012439:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:19.892707Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:19.893888Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494331641301012444:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:19.897491Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:19.911986Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494331641301012446:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:14:20.007380Z node 4 :TX_PROXY ERROR: Actor# [4:7494331645595979799:3552] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:20.793023Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494331624121140808:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:20.793115Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:14:22.625189Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910061783, txId: 281474976715672] shutting down >> KqpQuery::TryToUpdateNonExistentColumn [GOOD] >> KqpQuery::QueryResultsTruncated [GOOD] >> KqpQuery::QueryStats+UseSink >> KqpQuery::DdlInDataQuery [GOOD] >> KqpQuery::DeleteWhereInSubquery >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] Test command err: 2025-04-17T17:13:56.267293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:13:56.267364Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:57.029879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:13:58.799099Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.133507s 2025-04-17T17:13:58.799204Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.133629s 2025-04-17T17:14:00.064788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:14:00.064851Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:00.109830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:14:01.166957Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:14:01.167011Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:01.220290Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:14:02.236762Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:14:02.236822Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:02.286942Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:14:03.911311Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:14:03.911389Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:03.959513Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:14:13.017778Z node 15 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:14:13.017864Z node 15 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:13.112972Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:14:14.431367Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:14:14.431457Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:14.481241Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:14:19.898262Z node 21 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:14:19.898360Z node 21 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:19.949014Z node 21 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:14:21.261161Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:14:21.261245Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:21.309057Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:14:22.479057Z node 23 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:14:22.479160Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:22.528756Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:14:23.817121Z node 24 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:14:23.817211Z node 24 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:23.868369Z node 24 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 >> DataShardReadIterator::ShouldReadRangeChunk1_100 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ControlPlane_CDC_Disable [GOOD] Test command err: 2025-04-17T17:12:11.211262Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331092061625075:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:11.232692Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:12:14.340795Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0017e4/r3tmp/tmptEpCzC/pdisk_1.dat 2025-04-17T17:12:16.746286Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331092061625075:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:16.746395Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:16.746411Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:12:17.220697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:12:17.221434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:12:17.258629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:12:17.574798Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:17.739933Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.103951s 2025-04-17T17:12:17.740372Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.104409s TServer::EnableGrpc on GrpcPort 29996, node 1 2025-04-17T17:12:19.796590Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6vg8/0017e4/r3tmp/yandexlhhzCC.tmp 2025-04-17T17:12:19.796612Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6vg8/0017e4/r3tmp/yandexlhhzCC.tmp 2025-04-17T17:12:19.797639Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6vg8/0017e4/r3tmp/yandexlhhzCC.tmp 2025-04-17T17:12:19.797730Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:12:20.094838Z INFO: TTestServer started on Port 12590 GrpcPort 29996 TClient is connected to server localhost:12590 PQClient connected to localhost:29996 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:12:24.908836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T17:12:25.070891Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T17:12:25.150288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T17:12:28.591133Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-17T17:12:32.552272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:12:32.552296Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:36.436015Z node 1 :KQP_PROXY ERROR: TraceId: "01js2bsnqw7eczkkhhzxw6fk7k", Request deadline has expired for 2.828882s seconds 2025-04-17T17:12:36.738360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331199435808334:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:36.765845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:36.766337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331199435808347:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:36.770679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:12:36.801063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331199435808349:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:12:36.907147Z node 1 :TX_PROXY ERROR: Actor# [1:7494331199435808400:2489] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:12:39.555113Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494331199435808417:2362], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:12:39.564050Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGE4NWViOGMtYTE3YWY4ODMtZWUwYmE3ZDQtOGUxNmJjZWQ=, ActorId: [1:7494331199435808332:2352], ActorState: ExecuteState, TraceId: 01js2bsxakchwtg51m3ejnw3z4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:12:39.572969Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:12:39.584592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:12:39.681462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:12:39.882383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T17:12:45.002722Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 2552 } === CheckClustersList. Subcribe to ClusterTracker from [1:7494331238090514470:2729] === CheckClustersList. 
Ok 2025-04-17T17:12:50.990591Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-17T17:12:51.037678Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-17T17:12:51.041122Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494331263860318480:2856], Recipient [1:7494331113536461997:2210]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:51.041162Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:51.041186Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:12:51.041238Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7494331263860318476:2853], Recipient [1:7494331113536461997:2210]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-17T17:12:51.041254Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T17:12:51.136529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelP ... 
94331610926210093:2174]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:14:22.832120Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:14:22.832134Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:14:22.832172Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [7:7494331653875884377:2925], Recipient [7:7494331610926210093:2174]: {TEvModifySchemeTransaction txid# 281474976715674 TabletId# 72057594046644480} 2025-04-17T17:14:22.832188Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T17:14:22.834820Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "Root/origin/feed" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "streamImpl" PathId: 15 TotalGroupCount: 3 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "feed" TopicPath: "/Root/origin/feed/streamImpl" YdbDatabasePath: "/Root" MeteringMode: METERING_MODE_REQUEST_UNITS PartitionStrategy { MinPartitionCount: 3 MaxPartitionCount: 107 ScaleThresholdSeconds: 30 PartitionStrategyType: DISABLED } } Partitions { PartitionId: 0 TabletId: 72075186224037893 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037894 NextPartitionId: 1 } ApplyIf { PathId: 15 PathVersion: 2 } AllowAccessToPrivatePaths: true } TxId: 281474976715674 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T17:14:22.835213Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: Root/origin/feed/streamImpl, pathId: [OwnerId: 72057594046644480, LocalPathId: 15], opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.835386Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715674:1, propose status:StatusInvalidParameter, reason: Can`t disable auto partitioning., at schemeshard: 72057594046644480 2025-04-17T17:14:22.835645Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:14:22.836223Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715674, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." TxId: 281474976715674 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T17:14:22.836428Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715674, database: /Root, subject: root@builtin, status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: Root/origin/feed/streamImpl 2025-04-17T17:14:22.836466Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:14:22.836635Z node 7 :TX_PROXY ERROR: Actor# [7:7494331653875884377:2925] txid# 281474976715674, issues: { message: "Can`t disable auto partitioning." 
severity: 1 } 2025-04-17T17:14:22.836905Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:7494331653875884380:2927], Recipient [7:7494331610926210093:2174]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:14:22.836947Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:14:22.836968Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-17T17:14:22.897078Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:7494331653875884387:2930], Recipient [7:7494331610926210093:2174]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:14:22.897129Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:14:22.897158Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:14:22.897390Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [7:7494331610926210362:2317], Recipient [7:7494331610926210093:2174]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 3 Generation: 1 Round: 0 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82488 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 7 StartTime: 1744910052873 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-17T17:14:22.897419Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-17T17:14:22.897457Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-17T17:14:22.897566Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-17T17:14:22.897606Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.099991s, queue# 1 2025-04-17T17:14:22.904518Z node 7 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [7:7494331653875884244:2477], Partition 0, Sender [0:0:0], Recipient [7:7494331653875884323:2483], Cookie: 0 2025-04-17T17:14:22.904631Z node 7 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [7:7494331653875884323:2483]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:14:22.904668Z node 7 :PERSQUEUE TRACE: StateIdle, 
processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:14:22.904723Z node 7 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:14:22.904826Z node 7 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:14:22.904857Z node 7 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:14:22.904894Z node 7 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:14:22.999036Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [7:7494331610926210093:2174]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T17:14:22.999106Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T17:14:22.999132Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-17T17:14:22.999196Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-17T17:14:22.999221Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-17T17:14:22.999303Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 0 row count 0 2025-04-17T17:14:22.999391Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=SourceIdMeta2, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T17:14:22.999412Z node 7 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037888, followerId 0 2025-04-17T17:14:22.999526Z node 7 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:1 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-04-17T17:14:22.999584Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037888 2025-04-17T17:14:22.999676Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:14:23.000871Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [7:7494331610926210093:2174]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T17:14:23.000933Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T17:14:23.000955Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-17T17:14:23.005198Z node 7 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [7:7494331653875884244:2477], Partition 0, Sender [0:0:0], Recipient [7:7494331653875884323:2483], Cookie: 0 2025-04-17T17:14:23.005261Z node 7 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [7:7494331653875884323:2483]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:14:23.005290Z node 7 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:14:23.005340Z node 7 :PERSQUEUE TRACE: [PQ: 
72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:14:23.005430Z node 7 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:14:23.005462Z node 7 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:14:23.005501Z node 7 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:14:23.110624Z node 7 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [7:7494331653875884244:2477], Partition 0, Sender [0:0:0], Recipient [7:7494331653875884323:2483], Cookie: 0 2025-04-17T17:14:23.110717Z node 7 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [7:7494331653875884323:2483]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:14:23.110754Z node 7 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:14:23.110806Z node 7 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:14:23.110886Z node 7 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:14:23.110918Z node 7 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:14:23.110955Z node 7 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 |76.1%| [TA] $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateWithFunction [GOOD] Test command err: Trying to start YDB, gRPC: 16674, MsgBus: 20333 2025-04-17T17:13:55.082591Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331538650905599:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:55.082646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001f04/r3tmp/tmpDSt5mh/pdisk_1.dat 2025-04-17T17:13:55.850033Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:55.852004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:55.852120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:55.853829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16674, node 1 2025-04-17T17:13:56.044181Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:13:56.044202Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:13:56.044208Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:13:56.044314Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20333 TClient is connected to server localhost:20333 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:13:56.950863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T17:13:57.013196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:13:57.225690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:57.456402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:57.646866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:59.222940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331555830776462:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:59.223064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:59.571235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:13:59.649434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:13:59.711187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:13:59.790596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:13:59.834832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:13:59.885133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:13:59.961671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:00.029832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331560125744425:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:00.029945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:00.030245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331560125744430:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:00.034889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:00.049257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331560125744432:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:14:00.083542Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331538650905599:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:00.083602Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:14:00.130616Z node 1 :TX_PROXY ERROR: Actor# [1:7494331560125744489:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:01.171769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 864000000000 2025-04-17T17:14:01.507175Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910041539, txId: 281474976710674] shutting down Trying to start YDB, gRPC: 12067, MsgBus: 4801 2025-04-17T17:14:02.305163Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494331566023316666:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:02.305218Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001f04/r3tmp/tmpXt2cey/pdisk_1.dat 2025-04-17T17:14:02.449155Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12067, node 2 2025-04-17T17:14:02.543103Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:02.543194Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:02.550827Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:14:02.591503Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:02.591525Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:02.591531Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:02.591662Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4801 TClient is connected to server localhost:4801 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:03.022240Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:03.029064Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:14:03.039593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 wa ... proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:11.723541Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:11.776367Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:11.860772Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:11.933985Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:12.028008Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:12.107299Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331608448440461:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:12.107408Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:12.107475Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331608448440466:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:12.110588Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:12.121515Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494331608448440468:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:14:12.211037Z node 3 :TX_PROXY ERROR: Actor# [3:7494331608448440524:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:13.272239Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494331591268568887:2106];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:13.272336Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:14:16.097804Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910054139, txId: 281474976715672] shutting down Trying to start YDB, gRPC: 23186, MsgBus: 9871 2025-04-17T17:14:17.086401Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494331629291778322:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:17.086436Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001f04/r3tmp/tmpNtu93g/pdisk_1.dat 2025-04-17T17:14:17.233552Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:17.262447Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:17.262548Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:17.263796Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23186, node 4 2025-04-17T17:14:17.340358Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:17.340394Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:17.340404Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:17.340546Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9871 TClient is connected to server localhost:9871 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T17:14:17.953501Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:14:17.970718Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:18.053530Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:18.256292Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:18.345454Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:20.859028Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494331642176681950:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:20.859117Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:20.924612Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:20.977677Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:21.025914Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:21.114341Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:21.162793Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:21.239430Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:21.277768Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:21.339627Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494331646471649919:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:21.339777Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:21.346765Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494331646471649924:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:21.350870Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:21.366282Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494331646471649926:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:14:21.451462Z node 4 :TX_PROXY ERROR: Actor# [4:7494331646471649983:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:22.086849Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494331629291778322:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:22.095184Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:14:23.547161Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910063064, txId: 281474976710672] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::TryToUpdateNonExistentColumn [GOOD] Test command err: Trying to start YDB, gRPC: 5227, MsgBus: 32376 2025-04-17T17:14:05.845017Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331578283981617:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:05.851014Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002466/r3tmp/tmprPFAV1/pdisk_1.dat 2025-04-17T17:14:06.267381Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:06.295078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:06.295164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:06.301078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5227, node 1 2025-04-17T17:14:06.435863Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:06.435884Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:06.435892Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:06.436009Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32376 TClient is connected to server localhost:32376 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:07.002913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:07.029248Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:14:07.040720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:07.237756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:07.399464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:07.483015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:09.184051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331595463852547:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:09.184201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:09.581484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:09.613769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:09.650431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:09.690988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:09.722958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:09.759250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:09.792864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:09.877338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331595463853212:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:09.877410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:09.877648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331595463853217:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:09.880903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:09.893250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331595463853219:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:14:09.967875Z node 1 :TX_PROXY ERROR: Actor# [1:7494331595463853276:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:10.851029Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331578283981617:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:10.851114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:14:11.500311Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494331604053788272:2522], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=Y2ZmMzI3NC1iYTI4YWMyLWY5MjVlNThmLTMxMjM0MWRk. TraceId : 01js2bwswxdwwxs069awrtmxbg. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(17): Bad filter value. }. 2025-04-17T17:14:11.500845Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494331604053788273:2523], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=Y2ZmMzI3NC1iYTI4YWMyLWY5MjVlNThmLTMxMjM0MWRk. TraceId : 01js2bwswxdwwxs069awrtmxbg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7494331604053788250:2514], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T17:14:11.501311Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2ZmMzI3NC1iYTI4YWMyLWY5MjVlNThmLTMxMjM0MWRk, ActorId: [1:7494331604053788208:2514], ActorState: ExecuteState, TraceId: 01js2bwswxdwwxs069awrtmxbg, Create QueryResponse for error on request, msg:
: Error: Terminate was called, reason(17): Bad filter value. Trying to start YDB, gRPC: 63729, MsgBus: 10112 2025-04-17T17:14:12.491608Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494331610991295273:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:12.491696Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002466/r3tmp/tmpWBP7oO/pdisk_1.dat 2025-04-17T17:14:12.622161Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:12.648238Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:12.648320Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:12.653631Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63729, node 2 2025-04-17T17:14:12.736209Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:12.736231Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:12.736238Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:12.736353Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10112 TClient is connected to server localhost:10112 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 Secu ... eTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:16.102358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:16.153916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:16.194961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:16.239621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:16.328442Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494331628171166754:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:16.328524Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:16.328775Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494331628171166759:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:16.333251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:16.348839Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494331628171166761:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:14:16.439240Z node 2 :TX_PROXY ERROR: Actor# [2:7494331628171166818:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:17.500213Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494331610991295273:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:17.500272Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20879, MsgBus: 1166 2025-04-17T17:14:18.786835Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494331633601727625:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:18.786895Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002466/r3tmp/tmpAOzSgE/pdisk_1.dat 2025-04-17T17:14:18.916653Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:18.933794Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:18.933881Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:18.935660Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20879, node 3 2025-04-17T17:14:19.004578Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:19.004597Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:19.004603Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:19.004709Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1166 TClient is connected to server localhost:1166 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:14:19.525862Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:19.542890Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:19.627834Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:19.859124Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:19.952396Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:22.396258Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331650781598567:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:22.396363Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:22.453654Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.494570Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.527937Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.607544Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.656483Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.729718Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.798028Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.898861Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331650781599241:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:22.898985Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:22.899099Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331650781599246:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:22.903020Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:22.916460Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494331650781599248:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:14:23.013824Z node 3 :TX_PROXY ERROR: Actor# [3:7494331655076566600:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:23.787892Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494331633601727625:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:23.787999Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:14:24.067561Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7494331659371534234:2518], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:3:84: Error: At function: KiUpdateTable!
:3:84: Error: Column 'NonExistentColumn' does not exist in table '/Root/KeyValue'., code: 2017 2025-04-17T17:14:24.069243Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NjA5M2VkOWQtNDRhZWIzM2QtYWQyN2I0NjQtOTU1YmZjNDQ=, ActorId: [3:7494331659371534226:2513], ActorState: ExecuteState, TraceId: 01js2bx6dd2yzk9xwzmqcy7a2g, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_NotSplitedTopic [GOOD] >> TopicAutoscaling::PartitionSplit_AutosplitByLoad >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] >> KqpParams::MissingParameter [GOOD] >> KqpParams::ParameterTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 64808, MsgBus: 18150 2025-04-17T17:13:59.060501Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331552080330923:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:59.060543Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002470/r3tmp/tmpKiHYOG/pdisk_1.dat 2025-04-17T17:13:59.465121Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:59.497507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:59.497604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:59.499849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64808, node 1 2025-04-17T17:13:59.600255Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:13:59.600274Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:13:59.600281Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:13:59.600387Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18150 TClient is connected to server localhost:18150 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:14:00.213513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:00.240650Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:00.251110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:14:00.395529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:00.572858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:00.653702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:02.228096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331564965234597:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:02.228291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:02.560737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:02.606338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:02.684773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:02.719149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:02.753453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:02.838279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:02.911082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:02.981456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331564965235267:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:02.981519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:02.981646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331564965235272:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:02.984812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:02.993661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331564965235274:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:14:03.075095Z node 1 :TX_PROXY ERROR: Actor# [1:7494331569260202626:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:04.056581Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331552080330923:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:04.056645Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20552, MsgBus: 13979 2025-04-17T17:14:05.268478Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494331579194862864:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:05.268508Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002470/r3tmp/tmpgrDqc1/pdisk_1.dat 2025-04-17T17:14:05.433944Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20552, node 2 2025-04-17T17:14:05.521264Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:05.521396Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:05.523203Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:14:05.578548Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:05.578577Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:05.578586Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:05.578708Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13979 TClient is connected to server localhost:13979 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:14:06.037521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:06.046111Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:14:06.066369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:06.153356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04 ... don't have access permissions } 2025-04-17T17:14:08.954778Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494331592079767195:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:08.958171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:08.977230Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494331592079767197:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:14:09.035430Z node 2 :TX_PROXY ERROR: Actor# [2:7494331596374734550:3541] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:10.270105Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494331579194862864:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:10.270184Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26630, MsgBus: 9069 2025-04-17T17:14:15.483796Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:14:15.484021Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:14:15.484200Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002470/r3tmp/tmpUBZhaP/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26630, node 3 2025-04-17T17:14:16.032892Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:16.034229Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:16.034299Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:16.034351Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:16.034855Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:14:16.071997Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:16.072361Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:16.084140Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9069 TClient is connected to server localhost:9069 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:16.565884Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:16.616140Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:16.974960Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:14:17.465173Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:17.842654Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:18.531707Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1806:3403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:18.531896Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:18.556429Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:18.772307Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:19.061756Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:19.342510Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:19.612454Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:20.061770Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:20.389434Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:20.769746Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2558:3978], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:20.769913Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:20.770306Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2563:3983], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:20.777409Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:20.943147Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:2565:3985], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:14:21.013141Z node 3 :TX_PROXY ERROR: Actor# [3:2632:4033] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:22.227632Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.491927Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.836269Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T17:14:24.624753Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:3492:4719], TxId: 281474976715675, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=NTEzZTc4ZDItOTQzZmQ0MDUtNDU4NDYyMDgtMjkxZDBjMTU=. TraceId : 01js2bx5md0g1sc0qkwybn4h4r. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. } 2025-04-17T17:14:24.628082Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:3492:4719], TxId: 281474976715675, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=NTEzZTc4ZDItOTQzZmQ0MDUtNDU4NDYyMDgtMjkxZDBjMTU=. TraceId : 01js2bx5md0g1sc0qkwybn4h4r. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: OVERLOADED DEFAULT_ERROR: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. }. 2025-04-17T17:14:24.634727Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:3493:4720], TxId: 281474976715675, task: 2. Ctx: { TraceId : 01js2bx5md0g1sc0qkwybn4h4r. SessionId : ydb://session/3?node_id=3&id=NTEzZTc4ZDItOTQzZmQ0MDUtNDU4NDYyMDgtMjkxZDBjMTU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:3486:4234], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-04-17T17:14:24.635372Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTEzZTc4ZDItOTQzZmQ0MDUtNDU4NDYyMDgtMjkxZDBjMTU=, ActorId: [3:2891:4234], ActorState: ExecuteState, TraceId: 01js2bx5md0g1sc0qkwybn4h4r, Create QueryResponse for error on request, msg: >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild >> TConsoleTests::TestCreateTenantWrongName [GOOD] >> Cache::Test1 [GOOD] >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain >> Cache::Test2 [GOOD] >> Cache::Test3 [GOOD] >> EntityId::Order >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite >> EntityId::Order [GOOD] >> EscapingBasics::EncloseSecretShouldWork [GOOD] >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] >> IcebergClusterProcessor::ValidateDdlCreationForHiveWithS3 [GOOD] >> IcebergClusterProcessor::ValidateRiseErrors [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 7230, MsgBus: 15248 2025-04-17T17:13:56.149060Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331541910783898:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:56.149090Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001ed5/r3tmp/tmpPbEeIk/pdisk_1.dat 2025-04-17T17:13:56.582297Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7230, node 1 2025-04-17T17:13:56.640205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:56.640301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:56.641874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:13:56.701423Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:13:56.701453Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:13:56.701463Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:13:56.701578Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15248 TClient is connected to server localhost:15248 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:13:57.275594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:57.308176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:57.435150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:57.604128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:57.706584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:13:59.279199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331554795687578:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:59.279329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:59.562352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:13:59.593531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:13:59.664574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:13:59.701186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:13:59.742963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:13:59.811562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:13:59.868742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:13:59.956454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331554795688244:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:59.956554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:59.957115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331554795688249:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:13:59.960561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:13:59.971788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331554795688251:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:14:00.038877Z node 1 :TX_PROXY ERROR: Actor# [1:7494331559090655604:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:01.149283Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331541910783898:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:01.149356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:14:03.485478Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910041784, txId: 281474976710672] shutting down Trying to start YDB, gRPC: 24171, MsgBus: 5048 2025-04-17T17:14:04.302161Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494331576335985817:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:04.302218Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001ed5/r3tmp/tmp5LvuRu/pdisk_1.dat 2025-04-17T17:14:04.447881Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24171, node 2 2025-04-17T17:14:04.488957Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:04.489053Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:04.490971Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:14:04.504672Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:04.504694Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:04.504700Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:04.504798Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5048 TClient is connected to server localhost:5048 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T17:14:04.890147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:14:04.895379Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:14:04.906070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:04.962301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waitin ... :14:15.563996Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:15.646141Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:15.692670Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:15.771157Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:15.835045Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:15.904282Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331622666820228:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:15.904371Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:15.904419Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331622666820233:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:15.907592Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:15.925328Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-04-17T17:14:15.925612Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494331622666820235:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:14:15.993306Z node 3 :TX_PROXY ERROR: Actor# [3:7494331622666820289:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:17.208275Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494331609781916082:2217];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:17.208339Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:14:18.120438Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910057604, txId: 281474976715672] shutting down Trying to start YDB, gRPC: 22788, MsgBus: 24599 2025-04-17T17:14:18.930341Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494331633661555179:2146];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:18.936421Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001ed5/r3tmp/tmpqeTpI9/pdisk_1.dat 2025-04-17T17:14:19.080346Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:19.083235Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:19.083312Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:19.084721Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22788, node 4 2025-04-17T17:14:19.144642Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:19.144668Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:19.144675Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:19.144791Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24599 TClient is connected to server localhost:24599 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:19.623900Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:19.650737Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:19.742500Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:19.918592Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:20.011813Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:22.486097Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494331650841426027:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:22.486174Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:22.511989Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.590836Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.662970Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.704947Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.745410Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.821983Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.893981Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:23.006916Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494331655136394002:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:23.007032Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:23.007122Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494331655136394007:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:23.011517Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:23.029691Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494331655136394009:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:14:23.108253Z node 4 :TX_PROXY ERROR: Actor# [4:7494331655136394065:3551] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:23.928315Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494331633661555179:2146];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:23.928412Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:14:25.493822Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910065094, txId: 281474976715672] shutting down |76.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test3 [GOOD] >> TopicAutoscaling::ControlPlane_DisableAutoPartitioning [GOOD] >> TopicAutoscaling::ControlPlane_PauseAutoPartitioning |76.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateRiseErrors [GOOD] Test command err: test case: 1 test case: 2 test case: 3 test case: 4 test case: 5 test case: 6 test case: 7 test case: 8 test case: 9 >> Normalizers::SchemaVersionsNormalizer >> Cdc::NewImageLogDebezium [GOOD] >> Cdc::NaN[PqRunner] >> KqpStats::StreamLookupStats+StreamLookupJoin [GOOD] >> KqpStats::StreamLookupStats-StreamLookupJoin >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes >> TColumnShardTestReadWrite::ReadWithProgramNoProjection |76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator >> DataShardReadIterator::ShouldReadFromHeadWithConflict+UseSink [GOOD] >> DataShardReadIterator::ShouldReadFromHeadWithConflict-UseSink |76.1%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |76.1%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator >> TCdcStreamTests::CheckSchemeLimits [GOOD] >> TCdcStreamTests::MeteringServerless >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001 [GOOD] >> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] >> EvWrite::WriteWithSplit >> TKqpScanData::UnboxedValueSize [GOOD] >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantWrongPool |76.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::UnboxedValueSize [GOOD] >> KqpQuery::QueryStats+UseSink [GOOD] >> KqpQuery::QueryStats-UseSink >> TColumnShardTestReadWrite::ReadWithProgramNoProjection [GOOD] >> SystemView::AuthPermissions_ResultOrder [GOOD] >> SystemView::AuthPermissions_Selects >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail-useSink >> KqpQuery::DeleteWhereInSubquery [GOOD] >> KqpQuery::DictJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramNoProjection [GOOD] Test command err: 2025-04-17T17:14:29.943825Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:14:30.118211Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:14:30.147183Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:14:30.147480Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:14:30.155462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:14:30.155679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:14:30.155905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:14:30.156026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:14:30.156777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:14:30.156925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:14:30.157017Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:14:30.157135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:14:30.157302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:14:30.157415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:14:30.157509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:14:30.157598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:14:30.192123Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:14:30.192325Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:14:30.192384Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T17:14:30.192574Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:14:30.192743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:14:30.192825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:14:30.192875Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:14:30.192968Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:14:30.193037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:14:30.193076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:14:30.193109Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:14:30.193294Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:14:30.193371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:14:30.193412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:14:30.193445Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:14:30.193528Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:14:30.193580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:14:30.193623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:14:30.193648Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:14:30.193751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:14:30.193790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:14:30.193817Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:14:30.193874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:14:30.193913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:14:30.193938Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:14:30.194324Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=69; 2025-04-17T17:14:30.194429Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=44; 2025-04-17T17:14:30.194504Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-04-17T17:14:30.194580Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-04-17T17:14:30.194752Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:14:30.194806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:14:30.194847Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:14:30.195025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:14:30.195072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:14:30.195100Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:14:30.195249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:14:30.195293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:14:30.195326Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T17:14:30.195487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:14:30.195532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:14:30.195562Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T17:14:30.195668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:14:30.195714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:14:30.195760Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
; 2025-04-17T17:14:31.019755Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:100;schema=level: int32 timestamp: timestamp[us];);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T17:14:31.019789Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T17:14:31.019936Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;);columns=2;rows=100; 2025-04-17T17:14:31.020009Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=1200;num_rows=100;batch_columns=level,timestamp; 2025-04-17T17:14:31.020159Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:260:2278];bytes=1200;rows=100;faults=0;finished=0;fault=0;schema=level: int32 timestamp: timestamp[us]; 2025-04-17T17:14:31.020295Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T17:14:31.020415Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T17:14:31.020519Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T17:14:31.020652Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T17:14:31.020781Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T17:14:31.020894Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T17:14:31.020932Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:268:2286] finished for tablet 9437184 2025-04-17T17:14:31.021381Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:260:2278];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap"],"t":0.005},{"events":["f_processing","f_task_result"],"t":0.014},{"events":["f_ack","l_task_result"],"t":0.025},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.027}],"full":{"a":1744910070993915,"name":"_full_task","f":1744910070993915,"d_finished":0,"c":0,"l":1744910071020982,"d":27067},"events":[{"name":"bootstrap","f":1744910070994193,"d_finished":4892,"c":1,"l":1744910070999085,"d":4892},{"a":1744910071020633,"name":"ack","f":1744910071019570,"d_finished":971,"c":1,"l":1744910071020541,"d":1320},{"a":1744910071020613,"name":"processing","f":1744910071008261,"d_finished":7397,"c":10,"l":1744910071020542,"d":7766},{"name":"ProduceResults","f":1744910070997716,"d_finished":2675,"c":13,"l":1744910071020920,"d":2675},{"a":1744910071020922,"name":"Finish","f":1744910071020922,"d_finished":0,"c":0,"l":1744910071020982,"d":60},{"name":"task_result","f":1744910071008282,"d_finished":6301,"c":9,"l":1744910071019489,"d":6301}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T17:14:31.021463Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:260:2278];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T17:14:31.021851Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:260:2278];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap"],"t":0.005},{"events":["f_processing","f_task_result"],"t":0.014},{"events":["f_ack","l_task_result"],"t":0.025},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.027}],"full":{"a":1744910070993915,"name":"_full_task","f":1744910070993915,"d_finished":0,"c":0,"l":1744910071021506,"d":27591},"events":[{"name":"bootstrap","f":1744910070994193,"d_finished":4892,"c":1,"l":1744910070999085,"d":4892},{"a":1744910071020633,"name":"ack","f":1744910071019570,"d_finished":971,"c":1,"l":1744910071020541,"d":1844},{"a":1744910071020613,"name":"processing","f":1744910071008261,"d_finished":7397,"c":10,"l":1744910071020542,"d":8290},{"name":"ProduceResults","f":1744910070997716,"d_finished":2675,"c":13,"l":1744910071020920,"d":2675},{"a":1744910071020922,"name":"Finish","f":1744910071020922,"d_finished":0,"c":0,"l":1744910071021506,"d":584},{"name":"task_result","f":1744910071008282,"d_finished":6301,"c":9,"l":1744910071019489,"d":6301}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T17:14:31.021930Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T17:14:30.992813Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-17T17:14:31.021965Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T17:14:31.022226Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;; 2025-04-17T17:14:31.022980Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 9437184 2025-04-17T17:14:31.023207Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 100 scanId: 0 version: {1744910071073:100} readable: {1744910071073:max} at tablet 9437184 2025-04-17T17:14:31.023326Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 100 scanId: 0 at tablet 9437184 2025-04-17T17:14:31.023557Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1744910071073:100};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 100 } Function { Id: 1 Arguments { Id: 1 } Arguments { Id: 9 } } } } Command { Filter { Predicate { Id: 100 } } } ; 2025-04-17T17:14:31.023654Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1744910071073:100};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 100 } Function { Id: 1 Arguments { Id: 1 } Arguments { Id: 9 } } } } Command { Filter { Predicate { Id: 100 } } } ; 2025-04-17T17:14:31.023798Z node 1 :TX_COLUMNSHARD_SCAN WARN: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1744910071073:100};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot parse program;details=Can't parse SsaProgram: program has no projections; >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable [GOOD] >> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] Test command err: 2025-04-17T17:11:24.531548Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494330887031944025:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:24.548573Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/000aab/r3tmp/tmpahvCz3/pdisk_1.dat 2025-04-17T17:11:25.269024Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:25.275279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:25.275344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:25.282111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23022, node 1 2025-04-17T17:11:25.783419Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:25.783657Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:25.783664Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:25.791366Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21956 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:11:28.965625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:29.131379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:29.173361Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494330909094631691:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:29.173403Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; waiting... 
2025-04-17T17:11:29.248525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:29.248836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:29.269727Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-17T17:11:29.290822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:29.290949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:29.308210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:11:29.322562Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-17T17:11:29.335102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:11:29.554159Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494330887031944025:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:29.554231Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:11:30.000751Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [1:7494330878442009309:2072], interval end# 2025-04-17T17:11:30.000000Z, event interval end# 2025-04-17T17:11:30.000000Z 2025-04-17T17:11:30.000792Z node 1 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [1:7494330878442009309:2072], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-17T17:11:30.006811Z node 2 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [2:7494330877810745279:2063], interval end# 2025-04-17T17:11:30.000000Z, event interval end# 2025-04-17T17:11:30.000000Z 2025-04-17T17:11:30.006872Z node 2 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [2:7494330877810745279:2063], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-17T17:11:30.009996Z node 4 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [4:7494330909094631696:2078], interval end# 2025-04-17T17:11:30.000000Z, event interval end# 2025-04-17T17:11:30.000000Z 2025-04-17T17:11:30.010071Z node 4 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [4:7494330909094631696:2078], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-17T17:11:30.024364Z node 4 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [4:7494330887619795128:2063], interval end# 2025-04-17T17:11:30.000000Z, event interval end# 2025-04-17T17:11:30.000000Z 2025-04-17T17:11:30.033787Z node 4 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [4:7494330887619795128:2063], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-17T17:11:30.017481Z node 5 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [5:7494330887941654976:2063], interval end# 2025-04-17T17:11:30.000000Z, event interval end# 2025-04-17T17:11:30.000000Z 2025-04-17T17:11:30.044173Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [3:7494330884876017811:2063], interval end# 2025-04-17T17:11:30.000000Z, event interval end# 
2025-04-17T17:11:30.000000Z 2025-04-17T17:11:30.017532Z node 5 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [5:7494330887941654976:2063], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-17T17:11:30.068494Z node 3 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [3:7494330884876017811:2063], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-17T17:11:30.091575Z node 5 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [5:7494330887941654976:2063] 2025-04-17T17:11:30.133761Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [1:7494330887031944036:2080], interval end# 2025-04-17T17:11:30.000000Z, event interval end# 2025-04-17T17:11:30.000000Z 2025-04-17T17:11:30.133793Z node 1 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [1:7494330887031944036:2080], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-17T17:11:30.133807Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [1:7494330887031944036:2080] 2025-04-17T17:11:30.273070Z node 5 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2025-04-17T17:11:30.288279Z node 4 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [4:7494330887619795128:2063] 2025-04-17T17:11:30.297096Z node 5 :SYSTEM_VIEWS INFO: [72075186224037893] OnActivateExecutor 2025-04-17T17:11:30.297135Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Execute 2025-04-17T17:11:30.304555Z node 5 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [5:7494330909416491541:2077], interval end# 2025-04-17T17:11:30.000000Z, event interval end# 2025-04-17T17:11:30.000000Z 2025-04-17T17:11:30.304579Z node 5 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [5:7494330909416491541:2077], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-17T17:11:30.304589Z node 5 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [5:7494330909416491541:2077] 2025-04-17T17:11:30.304628Z node 5 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [5:7494330909416491541:2077], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2025-04-17T17:11:30.304637Z node 5 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [5:7494330909416491541:2077] 2025-04-17T17:11:30.304725Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:30.316301Z node 4 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [4:7494330909094631696:2078] 2025-04-17T17:11:30.315730Z node 5 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [5:7494330909416491541:2077], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/Tenant1 2025-04-17T17:11:30.317822Z node 5 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [5:7494330909416491541:2077], database# /Root/Tenant1, no sysview processor 2025-04-17T17:11:30.329539Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Complete 2025-04-17T17:11:30.329729Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Execute 2025-04-17T17:11:30.334254Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 2025-04-17T17:11:30.334280Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval metrics: query count# 0 2025-04-17T17:11:30.334450Z node 5 
:SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval query tops: total query count# 0 2025-04-17T17:11:30.334596Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading nodes to request: nodes count# 0, hashes count# 0 2025-04-17T17:11:30.334733Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 6, result count# 0 2025-04-17T17:11:30.334854Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 7, result count# 0 2025-04-17T17:11:30.335140Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 8, result count# 0 2025-04-17T17:11:30.335160Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 9, result count# 0 2025-04-17T17:11:30.335320Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 10, result count# 0 2025-04-17T17:11:30.335437Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 11, result count# 0 2025-04-17T17:11:30.335546Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 12, result count# 0 2025-04-17T17:11:30.335668Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 13, result count# 0 2025-04-17T17:11:30.335782Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 14, result count# 0 2025-04-17T17:11:30.335802Z node 5 :SYSTE ... 3, finished: 1 2025-04-17T17:14:21.121651Z node 19 :SYSTEM_VIEWS INFO: Scan finished, actor: [19:7494331648020636336:2427], owner: [19:7494331648020636333:2425], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:14:21.125346Z node 19 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910061116, txId: 281474976715673] shutting down 2025-04-17T17:14:21.322952Z node 19 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01js2bx3j93gy4w8kgq6qhv0t3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=19&id=NzY1YTM3Y2UtODQxNTdmNmQtMzVlMDI5YTctNmNmZWUyZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:14:21.325155Z node 19 :SYSTEM_VIEWS INFO: Scan started, actor: [19:7494331648020636370:2437], owner: [19:7494331648020636367:2435], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:14:21.326089Z node 19 :SYSTEM_VIEWS INFO: Scan prepared, actor: [19:7494331648020636370:2437], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:14:21.326451Z node 19 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [19:7494331648020636370:2437], row count: 3, finished: 1 2025-04-17T17:14:21.326496Z node 19 :SYSTEM_VIEWS INFO: Scan finished, actor: [19:7494331648020636370:2437], owner: [19:7494331648020636367:2435], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:14:21.329606Z node 19 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910061321, txId: 281474976715675] shutting down 2025-04-17T17:14:21.523961Z node 19 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01js2bx3rv9gffyq1jrgv412c2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=19&id=Y2VhMzUyMWItY2FiMDY3ZjEtNjM3YWU3NzYtNTc4MDk4ZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:14:21.526598Z node 19 :SYSTEM_VIEWS INFO: Scan started, actor: [19:7494331648020636402:2446], owner: [19:7494331648020636399:2444], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:14:21.528096Z node 19 :SYSTEM_VIEWS INFO: Scan prepared, actor: [19:7494331648020636402:2446], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:14:21.528835Z node 19 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [19:7494331648020636402:2446], row count: 4, finished: 1 2025-04-17T17:14:21.528874Z node 19 :SYSTEM_VIEWS INFO: Scan finished, actor: [19:7494331648020636402:2446], owner: [19:7494331648020636399:2444], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:14:21.531871Z node 19 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910061522, txId: 281474976715677] shutting down 2025-04-17T17:14:21.706686Z node 19 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01js2bx3z0ftzfnvndwgjwa827, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=19&id=YjAxMzJjYzQtZDJjZTg3MTktODY4NzhjNTItODU5YTQyZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:14:21.709946Z node 19 :SYSTEM_VIEWS INFO: Scan started, actor: [19:7494331648020636436:2456], owner: [19:7494331648020636433:2454], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:14:21.710752Z node 19 :SYSTEM_VIEWS INFO: Scan prepared, actor: [19:7494331648020636436:2456], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:14:21.711145Z node 19 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [19:7494331648020636436:2456], row count: 4, finished: 1 2025-04-17T17:14:21.711167Z node 19 :SYSTEM_VIEWS INFO: Scan finished, actor: [19:7494331648020636436:2456], owner: [19:7494331648020636433:2454], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:14:21.714142Z node 19 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910061704, txId: 281474976715679] shutting down 2025-04-17T17:14:22.915628Z node 20 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7494331651366370119:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:22.915762Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/000aab/r3tmp/tmpOss1ut/pdisk_1.dat 2025-04-17T17:14:23.147118Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:23.186818Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:23.186948Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:23.189103Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10288, node 20 2025-04-17T17:14:23.320894Z node 20 :NET_CLASSIFIER WARN: distributable config 
is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:23.320921Z node 20 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:23.320932Z node 20 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:23.321118Z node 20 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4033 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:23.748140Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:27.916048Z node 20 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[20:7494331651366370119:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:27.916154Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:14:28.485322Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:14:28.687705Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7494331677136174821:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:28.687705Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7494331677136174832:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:28.687779Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:28.691279Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:14:28.707944Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [20:7494331677136174835:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:14:28.790782Z node 20 :TX_PROXY ERROR: Actor# [20:7494331677136174889:2492] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:29.137508Z node 20 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2bxaya2ktktghaxj8dnf63, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=20&id=ZWZjMDM1MmEtYmVkNjBjNzMtMjA1OWNkOWQtNDk1OWE3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:14:29.146169Z node 20 :SYSTEM_VIEWS INFO: Scan started, actor: [20:7494331681431142238:2370], owner: [20:7494331681431142237:2369], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:14:29.152358Z node 20 :SYSTEM_VIEWS INFO: Scan prepared, actor: [20:7494331681431142238:2370], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:14:29.152974Z node 20 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [20:7494331681431142238:2370], row count: 4, finished: 1 2025-04-17T17:14:29.153003Z node 20 :SYSTEM_VIEWS INFO: Scan finished, actor: [20:7494331681431142238:2370], owner: [20:7494331681431142237:2369], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:14:29.155260Z node 20 :SYSTEM_VIEWS INFO: Scan started, actor: [20:7494331681431142250:2373], owner: [20:7494331681431142237:2369], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:14:29.164673Z node 20 :SYSTEM_VIEWS INFO: Scan prepared, actor: [20:7494331681431142250:2373], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:14:29.165181Z node 20 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [20:7494331681431142250:2373], row count: 4, finished: 1 2025-04-17T17:14:29.165212Z node 20 :SYSTEM_VIEWS INFO: Scan finished, actor: [20:7494331681431142250:2373], owner: [20:7494331681431142237:2369], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:14:29.169604Z node 20 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910069126, txId: 281474976715661] shutting down |76.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition [GOOD] >> TPersQueueMirrorer::TestBasicRemote >> TKqpScanData::EmptyColumns >> Normalizers::SchemaVersionsNormalizer [GOOD] >> TKqpScanData::EmptyColumns [GOOD] >> KqpParams::ParameterTypes [GOOD] >> KqpQuery::CreateAsSelectBadTypes+IsOlap >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_ChildFirst [GOOD] >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckSessionResetAfterCommit >> DataShardReadIterator::ShouldReadRangeChunk1 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk2 >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> 
Normalizers::SchemaVersionsNormalizer [GOOD] Test command err: 2025-04-17T17:14:29.223162Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:14:29.361563Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:14:29.385671Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:14:29.385983Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:14:29.395393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SchemaVersionCleaner; 2025-04-17T17:14:29.395643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-17T17:14:29.395818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:14:29.395969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:14:29.396079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:14:29.397075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:14:29.397214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:14:29.397304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:14:29.397421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:14:29.397532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:14:29.397650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:14:29.397756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:14:29.397857Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:14:29.427535Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:14:29.427800Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=SchemaVersionCleaner; 2025-04-17T17:14:29.427851Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-17T17:14:29.428156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SchemaVersionCleaner;id=NO_VALUE_OPTIONAL; 2025-04-17T17:14:29.428245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-04-17T17:14:29.428285Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-17T17:14:29.428473Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:14:29.428587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:14:29.428638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:14:29.428675Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-04-17T17:14:29.428757Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:14:29.428807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:14:29.428864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:14:29.428893Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-17T17:14:29.429067Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:14:29.429128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:14:29.429173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2025-04-17T17:14:29.429216Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-17T17:14:29.429310Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:14:29.429368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:14:29.429415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:14:29.429441Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:14:29.429501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:14:29.429533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:14:29.429560Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:14:29.429605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:14:29.429643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:14:29.429670Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:14:29.430056Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=58; 2025-04-17T17:14:29.430131Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-04-17T17:14:29.430201Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-17T17:14:29.430292Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2025-04-17T17:14:29.430452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:14:29.430500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:14:29.430530Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:14:29.430710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:14:29.430753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:14:29.430789Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:14:29.430931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:14:29.430969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:14:29.430998Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T17:14:29.431268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;pr ... : SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T17:14:33.065800Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-17T17:14:33.065863Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-17T17:14:33.065906Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-04-17T17:14:33.065964Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-04-17T17:14:33.066012Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-17T17:14:33.066115Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T17:14:33.066187Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=20048;finished=1; 
2025-04-17T17:14:33.066254Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T17:14:33.066496Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T17:14:33.066687Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T17:14:33.066735Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T17:14:33.066855Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-04-17T17:14:33.066920Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-04-17T17:14:33.067025Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:455:2462];bytes=2405760;rows=20048;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-04-17T17:14:33.067188Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T17:14:33.067319Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T17:14:33.067440Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T17:14:33.068609Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T17:14:33.068767Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T17:14:33.068874Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T17:14:33.068931Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:457:2463] finished for tablet 9437184 2025-04-17T17:14:33.069381Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:455:2462];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.231},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.234}],"full":{"a":1744910072834870,"name":"_full_task","f":1744910072834870,"d_finished":0,"c":0,"l":1744910073069003,"d":234133},"events":[{"name":"bootstrap","f":1744910072835108,"d_finished":2842,"c":1,"l":1744910072837950,"d":2842},{"a":1744910073068581,"name":"ack","f":1744910073066462,"d_finished":1023,"c":1,"l":1744910073067485,"d":1445},{"a":1744910073068564,"name":"processing","f":1744910072840794,"d_finished":110872,"c":9,"l":1744910073067488,"d":111311},{"name":"ProduceResults","f":1744910072836936,"d_finished":2604,"c":12,"l":1744910073068914,"d":2604},{"a":1744910073068918,"name":"Finish","f":1744910073068918,"d_finished":0,"c":0,"l":1744910073069003,"d":85},{"name":"task_result","f":1744910072840812,"d_finished":109692,"c":8,"l":1744910073066317,"d":109692}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T17:14:33.069459Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:455:2462];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T17:14:33.069850Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:455:2462];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.231},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.234}],"full":{"a":1744910072834870,"name":"_full_task","f":1744910072834870,"d_finished":0,"c":0,"l":1744910073069501,"d":234631},"events":[{"name":"bootstrap","f":1744910072835108,"d_finished":2842,"c":1,"l":1744910072837950,"d":2842},{"a":1744910073068581,"name":"ack","f":1744910073066462,"d_finished":1023,"c":1,"l":1744910073067485,"d":1943},{"a":1744910073068564,"name":"processing","f":1744910072840794,"d_finished":110872,"c":9,"l":1744910073067488,"d":111809},{"name":"ProduceResults","f":1744910072836936,"d_finished":2604,"c":12,"l":1744910073068914,"d":2604},{"a":1744910073068918,"name":"Finish","f":1744910073068918,"d_finished":0,"c":0,"l":1744910073069501,"d":583},{"name":"task_result","f":1744910072840812,"d_finished":109692,"c":8,"l":1744910073066317,"d":109692}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T17:14:33.069941Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T17:14:32.834356Z;index_granules=0;index_portions=1;index_batches=953;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2589608;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2589608;selected_rows=0; 2025-04-17T17:14:33.069982Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T17:14:33.070221Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; |76.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumns [GOOD] >> TComputeScheduler::ResourceWeight [GOOD] >> TKqpScanData::ArrowToUnboxedValueConverter >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] >> TConsoleTests::TestCreateTenantWrongPool [GOOD] >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TComputeScheduler::ResourceWeight [GOOD] Test command 
err: 510 500 1510 1500 990 1000 1000 1000 |76.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] >> WithSDK::CommitToPastWithWrongSessionId [GOOD] >> YdbSdkSessionsPool::StressTestAsync1 [GOOD] >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite >> TKqpScanData::DifferentNumberOfInputAndResultColumns >> KqpStats::StreamLookupStats-StreamLookupJoin [GOOD] >> KqpStats::SysViewCancelled >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] |76.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] >> TComputeScheduler::TTotalLimits [GOOD] |76.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] Test command err: 2025-04-17T17:14:29.813701Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:14:29.979680Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:14:30.002887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:14:30.003205Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:14:30.013499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:14:30.013709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:14:30.013953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:14:30.014080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:14:30.014197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:14:30.014296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:14:30.014389Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:14:30.014519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:14:30.014619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:14:30.014722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:14:30.014827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:14:30.014918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:14:30.047974Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:14:30.048204Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:14:30.048267Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T17:14:30.048464Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:14:30.048641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:14:30.048716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:14:30.048764Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:14:30.048856Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:14:30.048923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:14:30.048966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:14:30.048994Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:14:30.049191Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:14:30.049276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:14:30.049318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:14:30.049351Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:14:30.049440Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:14:30.049496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:14:30.049537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:14:30.049567Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:14:30.049684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:14:30.049733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:14:30.049764Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:14:30.049818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:14:30.049860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:14:30.049888Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:14:30.050318Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=72; 2025-04-17T17:14:30.050437Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=61; 2025-04-17T17:14:30.050521Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 2025-04-17T17:14:30.050595Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-04-17T17:14:30.050781Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:14:30.050845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:14:30.050883Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:14:30.051059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:14:30.051100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:14:30.051131Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:14:30.051286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:14:30.051335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:14:30.051379Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T17:14:30.051578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:14:30.051619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:14:30.051647Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T17:14:30.051786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:14:30.051826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:14:30.051871Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
d: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:14:35.427036Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T17:14:35.427138Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-17T17:14:35.427219Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=2759;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-17T17:14:35.427354Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:422:2437];bytes=2759;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2025-04-17T17:14:35.427515Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:14:35.427632Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:14:35.427776Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:14:35.427987Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T17:14:35.428145Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:14:35.428269Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:14:35.428308Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:423:2438] finished for tablet 9437184 2025-04-17T17:14:35.428776Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:422:2437];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.013}],"full":{"a":1744910075414939,"name":"_full_task","f":1744910075414939,"d_finished":0,"c":0,"l":1744910075428358,"d":13419},"events":[{"name":"bootstrap","f":1744910075415118,"d_finished":2660,"c":1,"l":1744910075417778,"d":2660},{"a":1744910075427964,"name":"ack","f":1744910075426787,"d_finished":1018,"c":1,"l":1744910075427805,"d":1412},{"a":1744910075427954,"name":"processing","f":1744910075417837,"d_finished":5039,"c":10,"l":1744910075427808,"d":5443},{"name":"ProduceResults","f":1744910075416617,"d_finished":2958,"c":13,"l":1744910075428295,"d":2958},{"a":1744910075428297,"name":"Finish","f":1744910075428297,"d_finished":0,"c":0,"l":1744910075428358,"d":61},{"name":"task_result","f":1744910075417849,"d_finished":3900,"c":9,"l":1744910075426619,"d":3900}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:14:35.428851Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:422:2437];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T17:14:35.429209Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:422:2437];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.013}],"full":{"a":1744910075414939,"name":"_full_task","f":1744910075414939,"d_finished":0,"c":0,"l":1744910075428887,"d":13948},"events":[{"name":"bootstrap","f":1744910075415118,"d_finished":2660,"c":1,"l":1744910075417778,"d":2660},{"a":1744910075427964,"name":"ack","f":1744910075426787,"d_finished":1018,"c":1,"l":1744910075427805,"d":1941},{"a":1744910075427954,"name":"processing","f":1744910075417837,"d_finished":5039,"c":10,"l":1744910075427808,"d":5972},{"name":"ProduceResults","f":1744910075416617,"d_finished":2958,"c":13,"l":1744910075428295,"d":2958},{"a":1744910075428297,"name":"Finish","f":1744910075428297,"d_finished":0,"c":0,"l":1744910075428887,"d":590},{"name":"task_result","f":1744910075417849,"d_finished":3900,"c":9,"l":1744910075426619,"d":3900}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:14:35.429298Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T17:14:35.414378Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=13268;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=13268;selected_rows=0; 2025-04-17T17:14:35.429339Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T17:14:35.429674Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestAsync1 [GOOD] Test command err: 2025-04-17T17:13:25.420164Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331408016561352:2084];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:25.429200Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001349/r3tmp/tmpue4ukJ/pdisk_1.dat 2025-04-17T17:13:26.169804Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:26.200395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:26.200483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:26.210472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23295, node 1 2025-04-17T17:13:26.380694Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:13:26.380735Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:13:26.380742Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:13:26.380883Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9615 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:13:26.725629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:13:30.367542Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331408016561352:2084];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:13:30.367620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:13:41.155240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:13:41.155273Z node 1 :IMPORT WARN: Table profiles were not loaded
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TComputeScheduler::TTotalLimits [GOOD]
Test command err: 1610 1600 1610 1600
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> WithSDK::CommitToPastWithWrongSessionId [GOOD]
Test command err: 2025-04-17T17:12:10.101712Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331084547192009:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:10.102226Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:12:15.545734Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331084547192009:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:15.561558Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:12:16.659696Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001824/r3tmp/tmpQ9Xmtm/pdisk_1.dat 2025-04-17T17:12:17.797730Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:17.800720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:19.296724Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:19.316294Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:20.315440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:20.321890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:21.847758Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:22.277076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:12:22.277475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0))
VolatileState: Disconnected -> Connecting 2025-04-17T17:12:22.375834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6660, node 1 2025-04-17T17:12:23.203654Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:23.203677Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:24.936684Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6vg8/001824/r3tmp/yandexZ4FR4Z.tmp 2025-04-17T17:12:24.937428Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6vg8/001824/r3tmp/yandexZ4FR4Z.tmp 2025-04-17T17:12:24.938144Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6vg8/001824/r3tmp/yandexZ4FR4Z.tmp 2025-04-17T17:12:24.938868Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:12:27.960912Z INFO: TTestServer started on Port 22467 GrpcPort 6660 TClient is connected to server localhost:22467 2025-04-17T17:12:36.197676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:12:36.197695Z node 1 :IMPORT WARN: Table profiles were not loaded PQClient connected to localhost:6660 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:12:38.822567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:12:39.116353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:12:39.516512Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:12:39.543929Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-17T17:12:47.235859Z node 1 :KQP_PROXY ERROR: TraceId: "01js2bt0cq9pnbkpskp2c2k2ww", Request deadline has expired for 2.661728s seconds 2025-04-17T17:12:47.306114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331243460982838:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:47.306315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:47.306806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331243460982873:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:47.310252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:12:47.431623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331243460982875:2366], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:12:47.481906Z node 1 :TX_PROXY ERROR: Actor# [1:7494331243460982939:2500] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:12:47.866856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:12:47.868840Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494331243460982947:2372], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:12:47.870414Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzZjNTViODItNzA3OGVmOTctMmFiMmJjOTktNWJhNmZhNDQ=, ActorId: [1:7494331243460982834:2359], ActorState: ExecuteState, TraceId: 01js2bt7y75rc4a0h5ktekd748, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:12:47.872728Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:12:47.949163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:12:48.050343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7494331247755950535:2682] === CheckClustersList. Ok 2025-04-17T17:12:54.732289Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-17T17:12:54.777504Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-17T17:12:54.781132Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494331273525754594:2836], Recipient [1:7494331127496865383:2181]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:54.781173Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:54.781190Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:12:54.781254Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7494331273525754590:2833], Recipient [1:7494331127496865383:2181]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-17T17:12:54.781270Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T17:12:54.962262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ... 
27Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 271188494, Sender [6:7494331677119968708:2459], Recipient [6:7494331677119968649:2455]: NKikimr::TEvPQ::TEvError 2025-04-17T17:14:34.116945Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvError 2025-04-17T17:14:34.116962Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvError Cookie 16, Error set offset in already dead session wrong-read-session-id actual is 2025-04-17T17:14:34.116976Z node 6 :PERSQUEUE DEBUG: Answer error topic: 'test-topic' partition: 0 messageNo: 0 requestId: error: set offset in already dead session wrong-read-session-id actual is 2025-04-17T17:14:34.117554Z node 6 :PERSQUEUE TRACE: StateFunc, received event# 271450112, Sender [6:7494331677119968684:2458], Recipient [6:7494331629875326894:2077]: NKikimr::NPQ::TEvPqCache::TEvCacheL2Request 2025-04-17T17:14:34.117588Z node 6 :PERSQUEUE TRACE: StateFunc, processing event TEvPqCache::TEvCacheL2Request 2025-04-17T17:14:34.118493Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [6:7494331702889773614:3364], Recipient [6:7494331677119968649:2455]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:14:34.118518Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:14:34.118536Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:14:34.118558Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [6:7494331702889773613:2778] destroyed 2025-04-17T17:14:34.128331Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331677119968649:2455], Partition 0, Sender [0:0:0], Recipient [6:7494331677119968708:2459], Cookie: 0 2025-04-17T17:14:34.128386Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331677119968708:2459]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:14:34.128409Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:14:34.128448Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:14:34.128511Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:14:34.128531Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:14:34.128556Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-17T17:14:34.168625Z node 6 :PQ_READ_PROXY DEBUG: new Describe consumer request 2025-04-17T17:14:34.168719Z node 6 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request operation_params { } path: "test-topic" consumer: "test-consumer" include_stats: true include_location: true 2025-04-17T17:14:34.169634Z node 6 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [6:7494331702889773625:2782]: Request location 2025-04-17T17:14:34.169661Z node 6 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [6:7494331702889773625:2782]: Request sessions 2025-04-17T17:14:34.170110Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 269877761, Sender [6:7494331702889773631:3371], Recipient [6:7494331677119968649:2455]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:14:34.170141Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:14:34.170156Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2025-04-17T17:14:34.170183Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [6:7494331702889773628:2783], now have 1 active actors on pipe 2025-04-17T17:14:34.170233Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [6:7494331702889773629:2784] connected; active server actors: 1 2025-04-17T17:14:34.170379Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 271187975, Sender [6:7494331702889773625:2782], Recipient [6:7494331677119968649:2455]: NKikimrPQ.TStatus ClientId: "test-consumer" 2025-04-17T17:14:34.170379Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 6, Generation 1 2025-04-17T17:14:34.170395Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvPersQueue::TEvStatus 2025-04-17T17:14:34.170411Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvPersQueue::TEvStatus 2025-04-17T17:14:34.170444Z node 6 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [6:7494331702889773625:2782]: Got location 2025-04-17T17:14:34.170463Z node 6 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [6:7494331702889773625:2782]: Got sessions 2025-04-17T17:14:34.170506Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188491 (NKikimr::TEvPQ::TEvPartitionStatus), Tablet [6:7494331677119968649:2455], Partition 0, Sender [6:7494331677119968649:2455], Recipient [6:7494331677119968708:2459], Cookie: 0 2025-04-17T17:14:34.170547Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188491, Sender [6:7494331677119968649:2455], Recipient [6:7494331677119968708:2459]: NKikimr::TEvPQ::TEvPartitionStatus 2025-04-17T17:14:34.170567Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPartitionStatus 2025-04-17T17:14:34.170759Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } 
ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-04-17T17:14:34.170866Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [6:7494331702889773629:2784] disconnected; active server actors: 1 2025-04-17T17:14:34.170889Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [6:7494331702889773629:2784] disconnected no session 2025-04-17T17:14:34.171098Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [6:7494331702889773631:3371], Recipient [6:7494331677119968649:2455]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:14:34.171118Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:14:34.171134Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:14:34.171154Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [6:7494331702889773628:2783] destroyed 2025-04-17T17:14:34.182080Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435095, Sender [0:0:0], Recipient [6:7494331629875327247:2138]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-04-17T17:14:34.182120Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-04-17T17:14:34.182145Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-17T17:14:34.182155Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-17T17:14:34.182205Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2025-04-17T17:14:34.183084Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435095, Sender [0:0:0], Recipient [6:7494331629875327247:2138]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-04-17T17:14:34.183108Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-04-17T17:14:34.183123Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-17T17:14:34.228698Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331677119968649:2455], Partition 0, Sender [0:0:0], Recipient [6:7494331677119968708:2459], Cookie: 0 2025-04-17T17:14:34.228760Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331677119968708:2459]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:14:34.228787Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:14:34.228845Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:14:34.228919Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:14:34.228942Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:14:34.228992Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-17T17:14:34.329036Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331677119968649:2455], Partition 0, Sender [0:0:0], Recipient [6:7494331677119968708:2459], Cookie: 0 2025-04-17T17:14:34.329155Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331677119968708:2459]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:14:34.329183Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:14:34.329228Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:14:34.329299Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:14:34.329322Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:14:34.329354Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:14:34.430465Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331677119968649:2455], Partition 0, Sender [0:0:0], Recipient [6:7494331677119968708:2459], Cookie: 0 2025-04-17T17:14:34.430539Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331677119968708:2459]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:14:34.430569Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:14:34.430618Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:14:34.430694Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:14:34.430717Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:14:34.430748Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0
>> TKqpScanData::FailOnUnsupportedPgType
>> TKqpScanData::FailOnUnsupportedPgType [GOOD]
|76.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::FailOnUnsupportedPgType [GOOD]
>> KqpQuery::QueryStats-UseSink [GOOD]
>> KqpQuery::QueryFromSqs
>> DataShardReadIterator::ShouldReadFromHeadWithConflict-UseSink [GOOD]
>> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict+UseSink
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite
>> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower [GOOD]
>> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc+UseSink
>> TComputeScheduler::QueryLimits [GOOD]
>> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain [GOOD]
>> TConsoleTests::TestCreateTenantAlreadyExists
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TComputeScheduler::QueryLimits [GOOD]
Test command err: 800 800 800 800
>> KqpQuery::CreateAsSelectBadTypes+IsOlap [GOOD]
>> SystemView::AuthOwners_TableRange [GOOD]
>> SystemView::AuthPermissions
>> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs
|76.2%| [TA] $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs
>> TopicAutoscaling::ControlPlane_PauseAutoPartitioning [GOOD]
>> TopicAutoscaling::MidOfRange [GOOD]
>> KqpQuery::DictJoin [GOOD]
>> SystemView::AuthGroupMembers_TableRange [GOOD]
>> SystemView::AuthEffectivePermissions
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelectBadTypes+IsOlap [GOOD]
Test command err: Trying to start YDB, gRPC: 5656, MsgBus: 30793 2025-04-17T17:14:14.723623Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331619619558641:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:14.724087Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00245d/r3tmp/tmpovS7Fe/pdisk_1.dat 2025-04-17T17:14:15.338047Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:15.340536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:15.340637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:15.349599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5656, node 1 2025-04-17T17:14:15.488676Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:15.488697Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:15.488707Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:15.488835Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30793 TClient is connected to server localhost:30793 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:16.163562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:16.187779Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:14:16.200316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:16.363454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:14:16.539112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:16.610257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T17:14:18.153875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331636799429450:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:18.153978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:18.459891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:18.494154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:18.564307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:18.593347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:18.624479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:18.704042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:18.769909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:18.876270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331636799430124:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:18.876380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:18.879755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331636799430129:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:18.883815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:18.895675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331636799430131:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:14:18.983378Z node 1 :TX_PROXY ERROR: Actor# [1:7494331636799430187:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:19.721590Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331619619558641:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:19.721655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28790, MsgBus: 6515 2025-04-17T17:14:21.061414Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494331646615505170:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:21.061464Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00245d/r3tmp/tmpB3Y8fX/pdisk_1.dat 2025-04-17T17:14:21.196338Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28790, node 2 2025-04-17T17:14:21.223947Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:21.224011Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:21.225785Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:14:21.264336Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:21.264356Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:21.264364Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:21.264500Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6515 TClient is connected to server localhost:6515 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:14:21.669182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:21.674053Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:14:21.689875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:21.762322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T1 ... 81474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:27.867565Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:28.082429Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:28.181882Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:30.440518Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331688861021827:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:30.440616Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:30.501535Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:30.552078Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:30.588880Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:30.644168Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:30.681618Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:30.767939Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:30.808640Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:30.885647Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331688861022492:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:30.885746Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:30.885973Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331688861022497:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:30.890191Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:30.906563Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494331688861022499:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:14:30.986669Z node 3 :TX_PROXY ERROR: Actor# [3:7494331688861022556:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:32.034387Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494331675976118180:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:32.035127Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8947, MsgBus: 31467 2025-04-17T17:14:33.807049Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494331699661729588:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:33.807225Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00245d/r3tmp/tmpNl2MNg/pdisk_1.dat 2025-04-17T17:14:33.938191Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:33.967323Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:33.967388Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:33.973664Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8947, node 4 2025-04-17T17:14:34.080666Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:34.080693Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:34.080703Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:34.080833Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31467 TClient is connected to server localhost:31467 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:14:34.695677Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:34.707342Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:14:37.510106Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494331716841599403:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:37.510252Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:37.510719Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494331716841599438:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:37.517746Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:14:37.529396Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494331716841599440:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:14:37.585101Z node 4 :TX_PROXY ERROR: Actor# [4:7494331716841599492:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:37.657346Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7494331716841599513:2340], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:6:62: Error: At function: KiCreateTable!
:6:20: Error: Invalid type for column: Value. Only YQL data types and PG types are currently supported, code: 2031 2025-04-17T17:14:37.659216Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=N2IxYzAwMTUtYzI3YzhjZWUtODgxZGM4ZmEtMjE0ZTVjYQ==, ActorId: [4:7494331716841599400:2327], ActorState: ExecuteState, TraceId: 01js2bxgvf7vhxjzrhysp239n6, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-17T17:14:37.718263Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7494331716841599537:2349], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:6:45: Error: At function: KiCreateTable!
:6:20: Error: Invalid type for column: Value. Only YQL data types and PG types are currently supported, code: 2031 2025-04-17T17:14:37.719896Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWRjNTVhYWUtZmQzOWNhMTEtZDFkOTkwN2ItMTMxNTI2MWM=, ActorId: [4:7494331716841599526:2343], ActorState: ExecuteState, TraceId: 01js2bxkq8138wm35kkq5wvbh5, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-17T17:14:37.760332Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7494331716841599549:2355], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:6:43: Error: At function: KiCreateTable!
:6:20: Error: Invalid type for column: Value. Only YQL data types and PG types are currently supported, code: 2031 2025-04-17T17:14:37.761788Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZDJlZTQ3NzEtMTljZDY1MmUtMjM5OGNkMmItNWRiYzJlNGM=, ActorId: [4:7494331716841599543:2352], ActorState: ExecuteState, TraceId: 01js2bxkrx0kpa0pnaf1pj3e2d, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: >> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault [GOOD] >> DataShardReadIteratorState::ShouldCalculateQuota [GOOD] >> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion |76.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK |76.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |76.2%| [TA] {RESULT} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |76.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::DictJoin [GOOD] Test command err: Trying to start YDB, gRPC: 9320, MsgBus: 27683 2025-04-17T17:14:12.894640Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331610277059636:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:12.895133Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002461/r3tmp/tmpGokMJi/pdisk_1.dat 2025-04-17T17:14:13.379525Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:13.386150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:13.387314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:13.391264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9320, node 1 2025-04-17T17:14:13.502906Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:13.502966Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:13.502975Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:13.503074Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27683 TClient is connected to server localhost:27683 WaitRootIsUp 'Root'... 
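[editor note] The three BAD_REQUEST compilations above all fail type annotation at KiCreateTable with code 2031: the `Value` column uses a type that is neither a primitive YQL data type nor a PG type. The actual query text is not preserved in the log; a hypothetical YQL statement that reproduces the same error class (the container type is an assumption):

    CREATE TABLE `/Root/BadTable` (
        Key Uint64,
        Value List<Uint64>,   -- container types are not storable columns -> code 2031
        PRIMARY KEY (Key)
    );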
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:14.115491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:14.129154Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:14:14.136533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:14.350731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:14.529303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:14.607897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:16.511753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331627456930561:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:16.511862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:16.860900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:16.902931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:16.960393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:17.002513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:17.089813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:17.179195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:17.260479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:17.378217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331631751898536:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:17.378293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:17.378484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331631751898541:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:17.383243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:17.400185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331631751898543:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:14:17.502693Z node 1 :TX_PROXY ERROR: Actor# [1:7494331631751898602:3557] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:17.895041Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331610277059636:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:17.895119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16126, MsgBus: 21920 2025-04-17T17:14:19.543446Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494331640251563656:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:19.543492Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002461/r3tmp/tmpyUHbR9/pdisk_1.dat 2025-04-17T17:14:19.662931Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16126, node 2 2025-04-17T17:14:19.679508Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:19.679588Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:19.682262Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:14:19.814113Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:19.814131Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:19.814137Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:19.814235Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21920 TClient is connected to server localhost:21920 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:14:20.405788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:20.416798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:20.495174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:20.677664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opI ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:29.139215Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:29.180443Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:29.262283Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:29.309377Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:29.350509Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:29.392308Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:29.453050Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331682842500982:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:29.453131Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:29.453159Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331682842500987:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:29.456290Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:29.467040Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494331682842500989:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:14:29.557014Z node 3 :TX_PROXY ERROR: Actor# [3:7494331682842501047:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:30.572724Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494331665662629351:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:30.584374Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14977, MsgBus: 26566 2025-04-17T17:14:32.138343Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494331697509209169:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:32.138398Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002461/r3tmp/tmpxDQGjM/pdisk_1.dat 2025-04-17T17:14:32.416996Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:32.437230Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:32.437314Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:32.438486Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14977, node 4 2025-04-17T17:14:32.484492Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:32.484513Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:32.484521Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:32.484639Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26566 TClient is connected to server localhost:26566 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:14:33.046385Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:33.060488Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:14:33.079632Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:33.191914Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:14:33.373708Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T17:14:33.463310Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:36.285288Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494331714689080002:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:36.285456Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:36.348501Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:36.387060Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:36.423262Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:36.457284Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:36.491950Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:36.542672Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:36.609761Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:36.682213Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494331714689080661:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:36.682317Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:36.682562Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494331714689080666:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:36.687140Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:36.701168Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494331714689080668:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:14:36.802270Z node 4 :TX_PROXY ERROR: Actor# [4:7494331714689080725:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:37.138471Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494331697509209169:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:37.138561Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |76.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |76.2%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |76.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::MidOfRange [GOOD] Test command err: 2025-04-17T17:12:20.555873Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331128762563252:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:20.555933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001713/r3tmp/tmp3zNG1L/pdisk_1.dat 2025-04-17T17:12:28.673300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:12:28.674189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:12:28.774860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:12:29.076645Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331128762563252:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:29.509533Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.170218s 2025-04-17T17:12:29.509828Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.170770s TServer::EnableGrpc on GrpcPort 11080, node 1 2025-04-17T17:12:31.268663Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:12:31.268886Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:31.268895Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:12:32.868780Z INFO: TTestServer started on Port 18595 GrpcPort 11080 2025-04-17T17:12:34.093962Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:34.142384Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: /home/runner/.ya/build/build_root/6vg8/001713/r3tmp/yandexSmq1bO.tmp 2025-04-17T17:12:34.143336Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6vg8/001713/r3tmp/yandexSmq1bO.tmp 2025-04-17T17:12:34.167985Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6vg8/001713/r3tmp/yandexSmq1bO.tmp 2025-04-17T17:12:34.170781Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18595 PQClient connected to localhost:11080 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:12:39.890961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:12:40.439860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:12:44.376806Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:12:44.857179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:12:44.857199Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:49.463433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331253316615713:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:49.463562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:49.465784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331253316615725:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:49.470213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:12:49.489095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331253316615727:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:12:49.892709Z node 1 :TX_PROXY ERROR: Actor# [1:7494331253316615791:2490] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:12:49.924097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:12:50.014655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:12:50.142659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:12:50.155795Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494331253316615818:2361], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:12:50.157810Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzhjYmMyMWQtOTM0YzFjYy1kMTFkY2VmYy02ZTBiNTA5MA==, ActorId: [1:7494331253316615710:2347], ActorState: ExecuteState, TraceId: 01js2bta1db75c1rj9ryk75g89, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:12:50.159282Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7494331257611583397:2674] === CheckClustersList. Ok 2025-04-17T17:12:55.831427Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-17T17:12:55.850039Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-17T17:12:55.855528Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494331279086420102:2792], Recipient [1:7494331163122302092:2216]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:55.855579Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:55.855592Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:12:55.855628Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7494331279086420098:2789], Recipient [1:7494331163122302092:2216]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480} 2025-04-17T17:12:55.855642Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T17:12:56.630641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true 
FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710672 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T17:12:56.645813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:12:56.647385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPath ... # 72057594046644480 2025-04-17T17:14:38.566664Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715673:0 progress is 1/1 2025-04-17T17:14:38.566677Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715673 ready parts: 1/1 2025-04-17T17:14:38.566698Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715673:0 progress is 1/1 2025-04-17T17:14:38.566709Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715673 ready parts: 1/1 2025-04-17T17:14:38.566728Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715673, ready parts: 1/1, is published: false 2025-04-17T17:14:38.566749Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715673 ready parts: 1/1 2025-04-17T17:14:38.566771Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715673:0 2025-04-17T17:14:38.566781Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715673:0 2025-04-17T17:14:38.566903Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 5 2025-04-17T17:14:38.566920Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715673, publications: 1, subscribers: 1 2025-04-17T17:14:38.566933Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715673, [OwnerId: 72057594046644480, LocalPathId: 13], 3 2025-04-17T17:14:38.568415Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [6:7494331677211823169:2241], Recipient [6:7494331677211823001:2147]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 13] Version: 3 } 2025-04-17T17:14:38.568437Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-17T17:14:38.568513Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 13 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715673 2025-04-17T17:14:38.568593Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 13 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715673 2025-04-17T17:14:38.568606Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 
72057594046644480, txId: 281474976715673 2025-04-17T17:14:38.568626Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715673, pathId: [OwnerId: 72057594046644480, LocalPathId: 13], version: 3 2025-04-17T17:14:38.568646Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 4 2025-04-17T17:14:38.568726Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715673, subscribers: 1 2025-04-17T17:14:38.568744Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [6:7494331720161497182:2470] 2025-04-17T17:14:38.568767Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:14:38.568947Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:14:38.569158Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715673 2025-04-17T17:14:38.569169Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:14:38.569228Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [6:7494331720161497182:2470] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715673 at schemeshard: 72057594046644480 2025-04-17T17:14:38.569396Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794753, Sender [6:7494331720161497222:2460], Recipient [6:7494331720161497096:2460]: NKikimr::TEvKeyValue::TEvIntermediate 2025-04-17T17:14:38.570528Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270795264, Sender [6:7494331720161497096:2460], Recipient [6:7494331720161497096:2460]: NKikimrClient.TResponse Status: 1 Cookie: 5 WriteResult { Status: 0 StatusFlags: 1 } WriteResult { Status: 0 StatusFlags: 1 } WriteResult { Status: 0 StatusFlags: 1 } 2025-04-17T17:14:38.570551Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvKeyValue::TEvResponse 2025-04-17T17:14:38.570571Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:14:38.570593Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTED 2025-04-17T17:14:38.570613Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673, State EXECUTED 2025-04-17T17:14:38.570633Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673 State EXECUTED FrontTxId 281474976715673 2025-04-17T17:14:38.570651Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-04-17T17:14:38.570670Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673, NewState WAIT_RS_ACKS 2025-04-17T17:14:38.570685Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673 moved from EXECUTED to WAIT_RS_ACKS 2025-04-17T17:14:38.570710Z node 6 :PERSQUEUE DEBUG: [TxId: 281474976715673] PredicateAcks: 0/0 2025-04-17T17:14:38.570720Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-17T17:14:38.570735Z node 6 :PERSQUEUE DEBUG: [TxId: 281474976715673] PredicateAcks: 0/0 2025-04-17T17:14:38.570751Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976715673 to the list for deletion 
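[editor note] Earlier in this test's output, the cluster tracker's first query failed with SCHEME_ERROR (code 2003) because the legacy PQ config tables did not exist yet; the `=== Init DC:` UPSERT then seeds `/Root/PQ/Config/V2/Cluster` and the CheckClustersList retry succeeds. A hypothetical sketch of the read that was failing — the table path is taken from the log, the column name is an assumption:

    SELECT version
    FROM `/Root/PQ/Config/V2/Versions`;  -- SCHEME_ERROR until the table is created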
2025-04-17T17:14:38.570769Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673, NewState DELETING 2025-04-17T17:14:38.570792Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976715673 2025-04-17T17:14:38.570842Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T17:14:38.570903Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [6:7494331720161497096:2460], Recipient [6:7494331720161497096:2460]: NKikimr::TEvKeyValue::TEvCollect 2025-04-17T17:14:38.571046Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794752, Sender [6:7494331720161497096:2460], Recipient [6:7494331720161497096:2460]: NKikimrClient.TKeyValueRequest Cookie: 5 CmdDeleteRange { Range { From: "tx_00000281474976715673" IncludeFrom: true To: "tx_00000281474976715673" IncludeTo: true } } CmdWrite { Key: "_txinfo" Value: "\020\214\245\373\245\3442\030\231\247\200\200\200\200@(\240\215\0060\214\245\373\245\34428\231\247\200\200\200\200@" StorageChannel: INLINE } 2025-04-17T17:14:38.571169Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794753, Sender [6:7494331720161497227:2460], Recipient [6:7494331720161497096:2460]: NKikimr::TEvKeyValue::TEvIntermediate 2025-04-17T17:14:38.571619Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [6:7494331720161497226:2474], Recipient [6:7494331720161497096:2460]: NKikimr::TEvKeyValue::TEvCompleteGC 2025-04-17T17:14:38.571664Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:7494331720161497197:2852], Recipient [6:7494331677211823001:2147]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:14:38.571686Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:14:38.571718Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-17T17:14:38.572005Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270795264, Sender [6:7494331720161497096:2460], Recipient [6:7494331720161497096:2460]: NKikimrClient.TResponse Status: 1 Cookie: 5 DeleteRangeResult { Status: 0 } WriteResult { Status: 0 StatusFlags: 1 } 2025-04-17T17:14:38.572025Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvKeyValue::TEvResponse 2025-04-17T17:14:38.572040Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:14:38.572056Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-04-17T17:14:38.572073Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673, State DELETING 2025-04-17T17:14:38.572096Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976715673 2025-04-17T17:14:38.572305Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [6:7494331720161497096:2460], Recipient [6:7494331720161497096:2460]: NKikimr::TEvKeyValue::TEvCollect 2025-04-17T17:14:38.573231Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [6:7494331720161497230:2475], Recipient [6:7494331720161497096:2460]: NKikimr::TEvKeyValue::TEvCompleteGC 2025-04-17T17:14:38.578019Z node 6 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-04-17T17:14:38.578102Z node 6 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request operation_params { } path: "autoscalit-topic" 2025-04-17T17:14:38.578227Z node 6 :PQ_READ_PROXY DEBUG: Describe topic actor for path autoscalit-topic 
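[editor note] The CreatePersQueueGroup proto earlier in this test (MinPartitionCount: 2, MaxPartitionCount: 100, ScaleThresholdSeconds: 300, 90%/30% write-speed thresholds, PartitionStrategyType: CAN_SPLIT) is what the SDK test setup submits for the autoscaled topic. A hedged YQL equivalent, assuming the documented autoscaling settings map one-to-one onto those proto fields:

    CREATE TOPIC `test-topic` (
        CONSUMER `test-consumer`
    ) WITH (
        min_active_partitions = 2,
        max_active_partitions = 100,
        auto_partitioning_strategy = 'scale_up',                      -- proto: CAN_SPLIT
        auto_partitioning_stabilization_window = Interval('PT300S'),  -- ScaleThresholdSeconds
        auto_partitioning_up_utilization_percent = 90,
        auto_partitioning_down_utilization_percent = 30
    );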
2025-04-17T17:14:38.613334Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331720161497096:2460], Partition 0, Sender [0:0:0], Recipient [6:7494331720161497157:2464], Cookie: 0 2025-04-17T17:14:38.613402Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331720161497157:2464]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:14:38.613431Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:14:38.613481Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:14:38.613551Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:14:38.613576Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:14:38.613612Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:14:38.717094Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331720161497096:2460], Partition 0, Sender [0:0:0], Recipient [6:7494331720161497157:2464], Cookie: 0 2025-04-17T17:14:38.717178Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331720161497157:2464]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:14:38.717204Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:14:38.717250Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:14:38.717328Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:14:38.717353Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:14:38.717388Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 >> Cdc::NaN[PqRunner] [GOOD] >> Cdc::NaN[YdsRunner] >> DataShardReadIterator::ShouldReadRangeChunk2 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk3 |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |76.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |76.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |76.3%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::Basic >> StreamCreator::WithResolvedTimestamps >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] >> SystemView::ShowCreateTableColumn [GOOD] >> SystemView::ShowCreateTableKeyBloomFilter |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TConsoleTests::TestCreateTenantAlreadyExists [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |76.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |76.3%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |76.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut >> TColumnShardTestReadWrite::CompactionInGranule_PKString >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite >> Normalizers::EmptyTablesNormalizer >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount >> TColumnShardTestReadWrite::ReadAggregate >> TColumnShardTestReadWrite::WriteOverload+InStore+WithWritePortionsOnInsert >> TColumnShardTestReadWrite::ReadWithProgramLike ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] Test command err: 2025-04-17T17:13:08.545220Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:13:08.679482Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:13:08.693910Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T17:13:08.710136Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:08.978271Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:13:08.995301Z node 1 
:TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:09.274757Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:13:09.276458Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T17:13:09.276539Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T17:13:09.276662Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T17:13:09.277015Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:13:09.277476Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:13:09.277544Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T17:13:09.277865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:13:09.277950Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:09.447825Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:13:09.518490Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T17:13:09.518664Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:13:09.518750Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T17:13:09.518782Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T17:13:09.518817Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T17:13:09.518848Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:13:09.518978Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:09.519026Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:09.519288Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T17:13:09.519379Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T17:13:09.519481Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:13:09.519512Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:13:09.519549Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T17:13:09.519581Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T17:13:09.519639Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T17:13:09.519671Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T17:13:09.519708Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:13:09.519803Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:09.519854Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:09.519906Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T17:13:09.525888Z node 1 :TX_DATASHARD TRACE: StateWork, 
received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T17:13:09.525952Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:13:09.526028Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:13:09.526167Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T17:13:09.526205Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T17:13:09.526271Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T17:13:09.526310Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:13:09.526341Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T17:13:09.526385Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T17:13:09.526424Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:13:09.526699Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T17:13:09.526733Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T17:13:09.526761Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T17:13:09.526789Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:13:09.526826Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T17:13:09.526855Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T17:13:09.526886Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T17:13:09.526914Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T17:13:09.526934Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T17:13:09.557153Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T17:13:09.557530Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:13:09.557792Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:13:09.558183Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T17:13:09.558399Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T17:13:09.562383Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:09.562587Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:09.562775Z node 1 :TX_DATASHARD DEBUG: Server 
connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T17:13:09.563432Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T17:13:09.563764Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:13:09.568240Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T17:13:09.568314Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:13:09.568363Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T17:13:09.568395Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T17:13:09.607763Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T17:13:09.608173Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:13:09.609138Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:09.609305Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:09.609477Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:13:09.609636Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:13:09.609761Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T17:13:09.610027Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T17:13:09.610349Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T17:13:09.610618Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:13:09.610781Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T17:13:09.611100Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T17:13:09.615746Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T17:13:09.616932Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T17:13:09.617115Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:13:09.617146Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T17:13:09.617175Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T17:13:09.617203Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T17:13:09.617279Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:13:09.617401Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T17:13:09.617616Z node 1 :TX_DATASHARD TRACE: Add 
[1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T17:13:09.617852Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T17:13:09.617919Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T17:13:09.617968Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T17:13:09.618007Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T17:13:09.618048Z node 1 :TX_DATA ... 025-04-17T17:14:42.083493Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:14:42.083710Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:14:42.083745Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2025-04-17T17:14:42.083788Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T17:14:42.083821Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:14:42.084008Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:14:42.084047Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-04-17T17:14:42.084090Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T17:14:42.084142Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:14:42.084323Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:14:42.084354Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-04-17T17:14:42.084396Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T17:14:42.084430Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:14:42.084639Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:14:42.084671Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-04-17T17:14:42.084713Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T17:14:42.084745Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:14:42.086368Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:14:42.086419Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-04-17T17:14:42.086472Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T17:14:42.086510Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:14:42.086897Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:14:42.086931Z node 32 :TX_DATASHARD TRACE: Complete 
execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-04-17T17:14:42.086979Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T17:14:42.087012Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:14:42.087200Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:14:42.087230Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2025-04-17T17:14:42.087272Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T17:14:42.087317Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:14:42.087538Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:14:42.087571Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2025-04-17T17:14:42.087615Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T17:14:42.087647Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:14:42.087853Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:14:42.087900Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-04-17T17:14:42.087944Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T17:14:42.087977Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:14:42.088198Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:14:42.088232Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-04-17T17:14:42.088276Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T17:14:42.088308Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:14:42.088467Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:14:42.088498Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-04-17T17:14:42.088541Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T17:14:42.088574Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:14:42.088697Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:14:42.088728Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-04-17T17:14:42.088787Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T17:14:42.088829Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 9437184 2025-04-17T17:14:42.088968Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:14:42.088999Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-04-17T17:14:42.089046Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T17:14:42.089078Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:14:42.089276Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:14:42.089307Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-04-17T17:14:42.089347Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T17:14:42.089430Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:14:42.089695Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-04-17T17:14:42.089746Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:14:42.089792Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 2025-04-17T17:14:42.089984Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-17T17:14:42.090020Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:14:42.090052Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-04-17T17:14:42.090157Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-17T17:14:42.090191Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:14:42.090221Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-04-17T17:14:42.090329Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-17T17:14:42.090364Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:14:42.090394Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-04-17T17:14:42.090451Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-17T17:14:42.090482Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
2025-04-17T17:14:42.090513Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-04-17T17:14:42.090622Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-17T17:14:42.090656Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:14:42.090685Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-04-17T17:14:42.090743Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-17T17:14:42.090775Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:14:42.090806Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-04-17T17:14:42.090886Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-04-17T17:14:42.090920Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:14:42.090948Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 26 26 24 26 27 27 25 27 31 31 31 31 24 28 28 25 21 23 23 25 18 25 31 25 21 31 4 - - - - - actual 26 26 24 26 27 27 25 27 31 31 31 31 24 28 28 25 21 23 23 25 18 25 31 25 21 31 4 - - - - - interm 6 4 4 3 2 3 6 6 4 3 3 3 - 4 - 6 - 4 - 0 - - - - - - 4 - - - - - >> KqpSnapshotRead::TestReadOnly-withSink >> KqpQuery::QueryFromSqs [GOOD] |76.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |76.3%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |76.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |76.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ut/ydb-core-security-ut |76.3%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut |76.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict+UseSink [GOOD] >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict-UseSink >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_OffsetCommit >> SystemView::AuthPermissions_Selects [GOOD] |76.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |76.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |76.4%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryFromSqs 
[GOOD] Test command err: Trying to start YDB, gRPC: 6104, MsgBus: 6079 2025-04-17T17:14:18.334691Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331637580944388:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:18.336227Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002455/r3tmp/tmpMc4cXx/pdisk_1.dat 2025-04-17T17:14:18.752090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:18.752191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:18.760409Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:18.770417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6104, node 1 2025-04-17T17:14:18.858308Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:18.858329Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:18.858339Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:18.858457Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6079 TClient is connected to server localhost:6079 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:19.462716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:19.491414Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:14:19.508100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T17:14:19.680266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:14:19.901540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:14:20.000270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T17:14:21.610827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331650465847911:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:21.610932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:21.925148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:21.975831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.010010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.079602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.122539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.184832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.244758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:22.341831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331654760815877:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:22.341911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:22.342107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331654760815882:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:22.345640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:22.361258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331654760815884:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:14:22.427389Z node 1 :TX_PROXY ERROR: Actor# [1:7494331654760815942:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:23.309054Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331637580944388:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:23.312522Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:14:23.402533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23333, MsgBus: 61525 2025-04-17T17:14:25.142163Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494331667919342954:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:25.144516Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002455/r3tmp/tmpcrVpBd/pdisk_1.dat 2025-04-17T17:14:25.289522Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:25.312197Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:25.312274Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:25.316797Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23333, node 2 2025-04-17T17:14:25.372627Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:25.372646Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:25.372652Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:25.372757Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61525 TClient is connected to server localhost:61525 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:25.982244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:25.995266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:26.078551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 28147497671 ... 902421Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:34.956861Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:34.996655Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:35.080959Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:35.126568Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:35.227453Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331710410468731:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:35.227542Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:35.228053Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331710410468736:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:35.240091Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:35.259632Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494331710410468738:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:14:35.356739Z node 3 :TX_PROXY ERROR: Actor# [3:7494331710410468795:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } query_phases { duration_us: 4411 table_access { name: "/Root/TwoShard" reads { rows: 3 bytes: 35 } partitions_count: 1 } cpu_time_us: 3147 affected_shards: 1 } query_phases { duration_us: 7134 table_access { name: "/Root/EightShard" updates { rows: 3 bytes: 47 } partitions_count: 1 } cpu_time_us: 2481 affected_shards: 2 } compilation { duration_us: 193196 cpu_time_us: 189817 } process_cpu_time_us: 595 total_duration_us: 209815 total_cpu_time_us: 196040 Trying to start YDB, gRPC: 14048, MsgBus: 7297 2025-04-17T17:14:37.852671Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494331717869918140:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:37.852739Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002455/r3tmp/tmps4UEwI/pdisk_1.dat 2025-04-17T17:14:38.175758Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14048, node 4 2025-04-17T17:14:38.213292Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:38.213359Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:38.215059Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:14:38.264715Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:38.264742Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:38.264752Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:38.264890Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7297 TClient is connected to server localhost:7297 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:14:38.884310Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:38.891088Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:14:38.909542Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:39.017230Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:39.236267Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:39.318185Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:41.881471Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494331735049789090:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:41.881578Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:41.964348Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:42.011049Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:42.072910Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:42.102503Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:42.132058Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:42.180554Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:42.257444Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:42.347369Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494331739344757060:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:42.347497Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:42.347687Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494331739344757065:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:42.354911Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:42.373321Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494331739344757067:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:14:42.475300Z node 4 :TX_PROXY ERROR: Actor# [4:7494331739344757123:3549] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:42.927165Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494331717869918140:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:42.945064Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:14:43.647924Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:1, at schemeshard: 72057594046644480 |76.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |76.4%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |76.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant >> TableCreation::MultipleTablesCreation |76.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |76.4%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |76.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc+UseSink [GOOD] >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc-UseSink >> StreamCreator::Basic [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain [GOOD] >> TConsoleTests::TestCreateSubSubDomain >> StreamCreator::WithResolvedTimestamps [GOOD] >> TopicAutoscaling::ReadFromTimestamp_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadFromTimestamp_PQv1 >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] >> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthPermissions_Selects [GOOD] Test command err: 2025-04-17T17:11:19.948757Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494330866208728854:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:20.104398Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/000ad5/r3tmp/tmp9khFTW/pdisk_1.dat 2025-04-17T17:11:22.452227Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: 
LookupError; 2025-04-17T17:11:22.696351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:22.696420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:22.710599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:11:23.033860Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25919, node 1 2025-04-17T17:11:24.260594Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:24.260615Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:24.260622Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:24.260715Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9271 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:11:24.770098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:24.805235Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:11:24.814098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:24.941869Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494330866208728854:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:24.941920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:11:30.134887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330913453369816:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:30.134992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:30.135495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494330913453369828:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:11:30.138607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T17:11:30.150143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494330913453369830:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:11:30.231190Z node 1 :TX_PROXY ERROR: Actor# [1:7494330913453369881:2414] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:11:30.721368Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2bqwjmez87a7d9mkj5n5de, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzQ5NzgxYjItYzI5ODY1NzItMzEyZjgyMGQtYjZiOWZmNWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:11:31.170367Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2bqxk0bqbjy50885znhqk5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmJiZmMwZTktMjBiODczMDYtNWMwMDAxZjUtYTFkNDBiN2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:11:31.490622Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01js2bqxnw6nwbkrcgxye2b19a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2IzM2UxNTgtMWYxNzVhZGEtYTI3YzQwNGMtZmE4NGJiYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:11:31.493604Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7494330917748337280:2379], owner: [1:7494330917748337277:2377], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-17T17:11:31.494921Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7494330917748337280:2379], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:11:31.495310Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7494330917748337280:2379], row count: 2, finished: 1 2025-04-17T17:11:31.495332Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7494330917748337280:2379], owner: [1:7494330917748337277:2377], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-17T17:11:31.498885Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744909891487, txId: 281474976710663] shutting down 2025-04-17T17:11:39.223884Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494330953133488982:2207];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/000ad5/r3tmp/tmp5TMC3d/pdisk_1.dat 2025-04-17T17:11:39.660027Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:11:41.131204Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:11:41.135549Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:41.269988Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:41.270070Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-04-17T17:11:41.277227Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23632, node 2 2025-04-17T17:11:41.429136Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:41.429156Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:41.429162Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:41.429303Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19216 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:11:42.225456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:42.382548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "Tenant1" } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T17:11:42.382758Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TCreateExtSubDomain Propose, path/Root/Tenant1, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:11:42.382838Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Tenant1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-17T17:11:42.383006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-17T17:11:42.383165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T17:11:42.383284Z node 2 :FLAT_TX_SCHE ... erloaded , top size by CPU # 0, top size by TLI # 0, time# 2025-04-17T17:14:42.844031Z 2025-04-17T17:14:42.904393Z node 33 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded , top size by CPU # 0, top size by TLI # 0, time# 2025-04-17T17:14:42.904226Z 2025-04-17T17:14:43.001700Z node 32 :KQP_EXECUTER ERROR: TxId: 281474976715688. 
Ctx: { TraceId: 01js2bxrpj5bze2aadnk4r4yj4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=32&id=ZGIwN2RlNGItYjE4ZmE1MWQtOWJlNzYzOTEtY2I3MTg2ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:14:43.006415Z node 32 :SYSTEM_VIEWS INFO: Scan started, actor: [32:7494331745190074973:2430], owner: [32:7494331745190074970:2428], scan id: 0, table id: [72057594046644480:1:0:auth_permissions] 2025-04-17T17:14:43.012125Z node 32 :SYSTEM_VIEWS INFO: Scan prepared, actor: [32:7494331745190074973:2430], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:14:43.012160Z node 32 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root tenant owner: root@builtin subject sid: empty require admin access: 0 is admin: 1 2025-04-17T17:14:43.012265Z node 32 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:14:43.012714Z node 32 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } Children [.metadata,Dir1,Table0,Tenant1,Tenant2] }] } 2025-04-17T17:14:43.012768Z node 32 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [32:7494331745190074973:2430], row count: 0, finished: 0 2025-04-17T17:14:43.014237Z node 32 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:14:43.016622Z node 32 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [SubDir1,SubDir2] }] } 2025-04-17T17:14:43.016681Z node 32 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [32:7494331745190074973:2430], row count: 0, finished: 0 2025-04-17T17:14:43.020549Z node 32 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:14:43.024674Z node 32 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [72057594046644480:10:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-17T17:14:43.024764Z node 32 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [32:7494331745190074973:2430], row count: 2, finished: 0 2025-04-17T17:14:43.032918Z node 32 :SYSTEM_VIEWS INFO: Scan finished, actor: [32:7494331745190074973:2430], owner: [32:7494331745190074970:2428], scan id: 0, table id: [72057594046644480:1:0:auth_permissions] 2025-04-17T17:14:43.036604Z node 32 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910083000, txId: 281474976715687] shutting down 2025-04-17T17:14:43.037238Z node 32 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [32:7494331697945432615:2208], database# , query hash# 3187945588805523718, cpu time# 246870 2025-04-17T17:14:43.246021Z node 32 :KQP_EXECUTER ERROR: TxId: 281474976715690. Ctx: { TraceId: 01js2bxrzjexdajej77pb1vc7v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=32&id=MmIwODllMzEtNWYyZjFhZWEtZTc4N2FhMDctMzVjYmE2NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:14:43.249471Z node 32 :SYSTEM_VIEWS INFO: Scan started, actor: [32:7494331745190075014:2440], owner: [32:7494331745190075011:2438], scan id: 0, table id: [72057594046644480:1:0:auth_permissions] 2025-04-17T17:14:43.251146Z node 32 :SYSTEM_VIEWS INFO: Scan prepared, actor: [32:7494331745190075014:2440], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:14:43.251172Z node 32 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root tenant owner: root@builtin subject sid: empty require admin access: 0 is admin: 1 2025-04-17T17:14:43.251246Z node 32 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:14:43.251631Z node 32 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } Children [.metadata,Dir1,Table0,Tenant1,Tenant2] }] } 2025-04-17T17:14:43.251682Z node 32 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [32:7494331745190075014:2440], row count: 0, finished: 0 2025-04-17T17:14:43.253408Z node 32 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:14:43.255845Z node 32 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [SubDir1,SubDir2] }] } 2025-04-17T17:14:43.255896Z node 32 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [32:7494331745190075014:2440], row count: 0, finished: 0 2025-04-17T17:14:43.256098Z node 32 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:14:43.256480Z node 32 :SYSTEM_VIEWS 
TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [72057594046644480:10:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-17T17:14:43.256569Z node 32 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [32:7494331745190075014:2440], row count: 1, finished: 0 2025-04-17T17:14:43.256687Z node 32 :SYSTEM_VIEWS INFO: Scan finished, actor: [32:7494331745190075014:2440], owner: [32:7494331745190075011:2438], scan id: 0, table id: [72057594046644480:1:0:auth_permissions] 2025-04-17T17:14:43.260848Z node 32 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910083243, txId: 281474976715689] shutting down 2025-04-17T17:14:43.261436Z node 32 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [32:7494331697945432615:2208], database# , query hash# 15123460272068726277, cpu time# 184266 2025-04-17T17:14:43.269491Z node 32 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 36 2025-04-17T17:14:43.270400Z node 32 :HIVE WARN: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:14:43.270546Z node 32 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 35 2025-04-17T17:14:43.270774Z node 32 :HIVE WARN: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:14:43.290000Z node 32 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 33 2025-04-17T17:14:43.290873Z node 32 :HIVE WARN: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:14:43.291003Z node 32 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 34 2025-04-17T17:14:43.291187Z node 32 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:14:43.294982Z node 33 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:14:43.284891Z node 36 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 |76.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydbd/ydbd |76.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |76.4%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd >> Normalizers::EmptyTablesNormalizer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::Basic [GOOD] Test command err: 2025-04-17T17:14:42.900201Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331737046571046:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:42.910356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0010cf/r3tmp/tmpZQ4jQ3/pdisk_1.dat 2025-04-17T17:14:43.593982Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:43.594078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:43.597825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:14:43.639019Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15457 TServer::EnableGrpc on GrpcPort 24126, node 1 2025-04-17T17:14:44.057460Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:44.057495Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:44.057504Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:44.057671Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15457 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:44.685806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:44.721289Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:14:44.753366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744910084897 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744910084764 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744910084897 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... (TRUNCATED) 2025-04-17T17:14:45.012489Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T17:14:45.012617Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T17:14:45.012631Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T17:14:45.016227Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T17:14:46.796905Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744910084897, tx_id: 281474976710658 } } } 2025-04-17T17:14:46.797328Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T17:14:46.800788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:14:46.801625Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-17T17:14:46.801661Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-17T17:14:46.842363Z node 1 
:REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-17T17:14:46.842402Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] 2025-04-17T17:14:46.848416Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2025-04-17T17:14:46.967919Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][1:7494331754226441154:2346] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:5:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-17T17:14:47.022774Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2025-04-17T17:14:47.022805Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# 2025-04-17T17:14:47.047445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:47.070677Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2025-04-17T17:14:47.070712Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744910084897 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyC... 
(TRUNCATED) >> KqpProxy::NoLocalSessionExecution ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] Test command err: 2025-04-17T17:14:45.068798Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:14:45.202391Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:14:45.225527Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:14:45.225834Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:14:45.233610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:14:45.233807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:14:45.234048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:14:45.234168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:14:45.234284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:14:45.234384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:14:45.234466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:14:45.234574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:14:45.234688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:14:45.234804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:14:45.234938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:14:45.235042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-17T17:14:45.266665Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:14:45.266843Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:14:45.266926Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T17:14:45.267088Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:14:45.267271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:14:45.267342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:14:45.267383Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:14:45.267471Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:14:45.267533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:14:45.267569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:14:45.267597Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:14:45.267789Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:14:45.267849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:14:45.267885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:14:45.267915Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:14:45.267985Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:14:45.268027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:14:45.268060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:14:45.268085Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:14:45.268189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:14:45.268227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:14:45.268259Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:14:45.268300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:14:45.268335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:14:45.268371Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:14:45.268739Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=66; 2025-04-17T17:14:45.330486Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=90; 2025-04-17T17:14:45.330617Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=53; 2025-04-17T17:14:45.330745Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=56; 2025-04-17T17:14:45.330975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:14:45.331042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:14:45.331085Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:14:45.331293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:14:45.331335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:14:45.331366Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:14:45.331496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-04-17T17:14:45.331536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:14:45.331565Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T17:14:45.331766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:14:45.331826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:14:45.331853Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T17:14:45.331962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:14:45.331997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:14:45.332053Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... duce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T17:14:48.441284Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-17T17:14:48.441331Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-17T17:14:48.441379Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=6; 2025-04-17T17:14:48.441426Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=10;merger=0;interval_id=6; 2025-04-17T17:14:48.441529Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-17T17:14:48.441652Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-17T17:14:48.441690Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=10;finished=1; 2025-04-17T17:14:48.441741Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T17:14:48.441960Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T17:14:48.442095Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:10;schema=message: string;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-17T17:14:48.442131Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T17:14:48.442232Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;);columns=1;rows=10; 2025-04-17T17:14:48.442282Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=61;num_rows=10;batch_columns=message; 2025-04-17T17:14:48.442390Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:299:2317];bytes=61;rows=10;faults=0;finished=0;fault=0;schema=message: string; 2025-04-17T17:14:48.442535Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-17T17:14:48.442630Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-17T17:14:48.442720Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-17T17:14:48.442877Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T17:14:48.442969Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-17T17:14:48.443049Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-17T17:14:48.443082Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:300:2318] finished for tablet 9437184 2025-04-17T17:14:48.443466Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:299:2317];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.099},{"events":["f_ack","l_task_result"],"t":0.206},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.207}],"full":{"a":1744910088235460,"name":"_full_task","f":1744910088235460,"d_finished":0,"c":0,"l":1744910088443123,"d":207663},"events":[{"name":"bootstrap","f":1744910088235618,"d_finished":99395,"c":1,"l":1744910088335013,"d":99395},{"a":1744910088442861,"name":"ack","f":1744910088441936,"d_finished":816,"c":1,"l":1744910088442752,"d":1078},{"a":1744910088442851,"name":"processing","f":1744910088335092,"d_finished":104351,"c":9,"l":1744910088442754,"d":104623},{"name":"ProduceResults","f":1744910088236832,"d_finished":88697,"c":12,"l":1744910088443070,"d":88697},{"a":1744910088443072,"name":"Finish","f":1744910088443072,"d_finished":0,"c":0,"l":1744910088443123,"d":51},{"name":"task_result","f":1744910088335110,"d_finished":103377,"c":8,"l":1744910088441791,"d":103377}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-17T17:14:48.541941Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:299:2317];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T17:14:48.542391Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:299:2317];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.099},{"events":["f_ack","l_task_result"],"t":0.206},{"events":["l_ProduceResults","f_Finish"],"t":0.207},{"events":["l_ack","l_processing","l_Finish"],"t":0.306}],"full":{"a":1744910088235460,"name":"_full_task","f":1744910088235460,"d_finished":0,"c":0,"l":1744910088542019,"d":306559},"events":[{"name":"bootstrap","f":1744910088235618,"d_finished":99395,"c":1,"l":1744910088335013,"d":99395},{"a":1744910088442861,"name":"ack","f":1744910088441936,"d_finished":816,"c":1,"l":1744910088442752,"d":99974},{"a":1744910088442851,"name":"processing","f":1744910088335092,"d_finished":104351,"c":9,"l":1744910088442754,"d":203519},{"name":"ProduceResults","f":1744910088236832,"d_finished":88697,"c":12,"l":1744910088443070,"d":88697},{"a":1744910088443072,"name":"Finish","f":1744910088443072,"d_finished":0,"c":0,"l":1744910088542019,"d":98947},{"name":"task_result","f":1744910088335110,"d_finished":103377,"c":8,"l":1744910088441791,"d":103377}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-17T17:14:48.542483Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T17:14:48.235055Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-17T17:14:48.542532Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T17:14:48.542773Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::EmptyTablesNormalizer [GOOD] Test command err: 2025-04-17T17:14:44.620721Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:14:44.831032Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:14:44.856198Z node 1 :TX_COLUMNSHARD 
INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:14:44.856476Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:14:44.864438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=PortionsCleaner; 2025-04-17T17:14:44.864650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-17T17:14:44.864826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:14:44.865167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:14:44.865296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:14:44.865403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:14:44.865499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:14:44.865584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:14:44.865689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:14:44.865793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:14:44.865910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:14:44.866010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:14:44.866118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:14:44.895336Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:14:44.895578Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=PortionsCleaner; 2025-04-17T17:14:44.895621Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-17T17:14:44.896077Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-04-17T17:14:44.896184Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-17T17:14:44.896278Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=53; 2025-04-17T17:14:44.896360Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=45; 2025-04-17T17:14:44.896505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=PortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-04-17T17:14:44.896577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-04-17T17:14:44.896627Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-17T17:14:44.896751Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:14:44.896815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:14:44.896874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:14:44.896925Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-04-17T17:14:44.896998Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:14:44.897044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:14:44.897104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:14:44.897132Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-17T17:14:44.897286Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:14:44.897378Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:14:44.897419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:14:44.897444Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-17T17:14:44.897520Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:14:44.897584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:14:44.897624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:14:44.897655Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:14:44.897747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:14:44.897782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:14:44.897808Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:14:44.897851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:14:44.897897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:14:44.897920Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:14:44.898239Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=31; 2025-04-17T17:14:44.898314Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=27; 2025-04-17T17:14:44.898378Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=25; 2025-04-17T17:14:44.898434Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=22; 2025-04-17T17:14:44.898587Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:14:44.898631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:14:44.898659Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:14:44.898847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:14:44.898886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:14:44.898928Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:14:44.899110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpda ... node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T17:14:48.780428Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T17:14:48.780523Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T17:14:48.780559Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-17T17:14:48.780588Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T17:14:48.780638Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T17:14:48.780740Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T17:14:48.780814Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-04-17T17:14:48.780901Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T17:14:48.780981Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T17:14:48.781030Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T17:14:48.781133Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T17:14:48.992468Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 111 scanId: 0 version: {1744910085794:111} readable: {1744910085794:max} at tablet 9437184 2025-04-17T17:14:48.992621Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 111 scanId: 0 at tablet 9437184 2025-04-17T17:14:48.992821Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1744910085794:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-04-17T17:14:48.992926Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1744910085794:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-04-17T17:14:48.993754Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1744910085794:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4},{"from":6}]},{"owner_id":2,"inputs":[{"from":7}]},{"owner_id":4,"inputs":[{"from":7}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"key1","id":1}},"o":"1","t":"AssembleOriginalData"},"w":11,"id":2},"6":{"p":{"i":"3","p":{"address":{"name":"field","id":3}},"o":"3","t":"AssembleOriginalData"},"w":11,"id":6},"7":{"p":{"p":{"data":[{"name":"key1","id":1},{"name":"key2","id":2},{"name":"field","id":3}]},"o":"1,2,3","t":"FetchOriginalData"},"w":6,"id":7},"4":{"p":{"i":"2","p":{"address":{"name":"key2","id":2}},"o":"2","t":"AssembleOriginalData"},"w":11,"id":4},"0":{"p":{"i":"1,2,3","t":"Projection"},"w":33,"id":0}}}; 2025-04-17T17:14:48.993890Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1744910085794:111};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-17T17:14:49.001006Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1744910085794:111};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:463:2468];trace_detailed=; 2025-04-17T17:14:49.001755Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=1,2,3;column_names=field,key1,key2;);; 2025-04-17T17:14:49.001979Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; 2025-04-17T17:14:49.002429Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T17:14:49.002605Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T17:14:49.002741Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T17:14:49.002789Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:463:2468] finished for tablet 9437184 2025-04-17T17:14:49.003218Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:461:2467];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744910089000896,"name":"_full_task","f":1744910089000896,"d_finished":0,"c":0,"l":1744910089002851,"d":1955},"events":[{"name":"bootstrap","f":1744910089001194,"d_finished":983,"c":1,"l":1744910089002177,"d":983},{"a":1744910089002351,"name":"ack","f":1744910089002351,"d_finished":0,"c":0,"l":1744910089002851,"d":500},{"a":1744910089002331,"name":"processing","f":1744910089002331,"d_finished":0,"c":0,"l":1744910089002851,"d":520},{"name":"ProduceResults","f":1744910089002154,"d_finished":317,"c":2,"l":1744910089002771,"d":317},{"a":1744910089002776,"name":"Finish","f":1744910089002776,"d_finished":0,"c":0,"l":1744910089002851,"d":75}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T17:14:49.003332Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:461:2467];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T17:14:49.003749Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:461:2467];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744910089000896,"name":"_full_task","f":1744910089000896,"d_finished":0,"c":0,"l":1744910089003390,"d":2494},"events":[{"name":"bootstrap","f":1744910089001194,"d_finished":983,"c":1,"l":1744910089002177,"d":983},{"a":1744910089002351,"name":"ack","f":1744910089002351,"d_finished":0,"c":0,"l":1744910089003390,"d":1039},{"a":1744910089002331,"name":"processing","f":1744910089002331,"d_finished":0,"c":0,"l":1744910089003390,"d":1059},{"name":"ProduceResults","f":1744910089002154,"d_finished":317,"c":2,"l":1744910089002771,"d":317},{"a":1744910089002776,"name":"Finish","f":1744910089002776,"d_finished":0,"c":0,"l":1744910089003390,"d":614}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T17:14:49.003846Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T17:14:48.993847Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-17T17:14:49.003888Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T17:14:49.003993Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=20048; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=20048; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::WithResolvedTimestamps [GOOD] Test command err: 2025-04-17T17:14:42.900352Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331740776883635:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:42.907237Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001101/r3tmp/tmpG88aHG/pdisk_1.dat 2025-04-17T17:14:43.592997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:43.593116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:43.594492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:14:43.619268Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21857 TServer::EnableGrpc on GrpcPort 27417, node 1 2025-04-17T17:14:44.165592Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:44.165609Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:44.165615Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:44.165707Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21857 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:44.772220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:44.801044Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:14:44.804833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744910084925 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744910084834 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744910084925 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-04-17T17:14:45.020291Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T17:14:45.020408Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T17:14:45.020420Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T17:14:45.021001Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T17:14:47.461688Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744910084925, tx_id: 281474976710658 } } } 2025-04-17T17:14:47.462077Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T17:14:47.463840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:14:47.465332Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-17T17:14:47.465347Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-17T17:14:47.498769Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-17T17:14:47.498800Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] 2025-04-17T17:14:47.501019Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2025-04-17T17:14:47.602041Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][1:7494331762251721035:2346] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:5:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-17T17:14:47.633626Z node 1 :REPLICATION_CONTROLLER 
TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2025-04-17T17:14:47.633654Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# 2025-04-17T17:14:47.653939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:47.675156Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2025-04-17T17:14:47.675180Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744910084925 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyC... (TRUNCATED) 2025-04-17T17:14:47.884258Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331740776883635:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:47.884363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |76.4%| [TA] $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} |76.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |76.5%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} |76.5%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion [GOOD] Test command err: 2025-04-17T17:13:13.123084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:13:13.123606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:13:13.123671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0010f0/r3tmp/tmpnMRHjX/pdisk_1.dat 2025-04-17T17:13:13.682220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:13:13.717985Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:13.763069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:13.763213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:13.775985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:13:13.857684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:13:13.953137Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:13:13.954290Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:13:13.954762Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:13:13.955036Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:14.013415Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:13:14.014172Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:14.014279Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:13:14.016070Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:13:14.016595Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:13:14.016667Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:13:14.017037Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:13:14.017188Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:13:14.017267Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:13:14.029156Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:13:14.072514Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:13:14.072735Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:13:14.072863Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:13:14.072899Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:13:14.072935Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:13:14.072980Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:13:14.073242Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:14.073307Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:14.073575Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:13:14.073662Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:13:14.073772Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:13:14.073824Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:13:14.073869Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:13:14.073905Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:13:14.073957Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:13:14.073997Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:13:14.074044Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:13:14.074190Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:14.074231Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:14.074275Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:13:14.074661Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:13:14.074715Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:13:14.074824Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:13:14.075120Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:13:14.075176Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:13:14.075277Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:13:14.075325Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:13:14.075380Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:13:14.075416Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:13:14.075452Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:13:14.075739Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:13:14.075782Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:13:14.075828Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:13:14.075875Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:13:14.075922Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:13:14.075980Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:13:14.076015Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:13:14.076051Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:13:14.076077Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:13:14.077581Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:13:14.077639Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:13:14.092763Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:13:14.092839Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:13:14.092879Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:13:14.092917Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:13:14.092995Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:13:14.247696Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:14.247753Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:14.247790Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:13:14.248154Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:13:14.248195Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:13:14.248298Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:13:14.248347Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:13:14.248386Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:13:14.248420Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:13:14.252275Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:13:14.252350Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:13:14.253119Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:14.253160Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:14.253203Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:13:1 ... -04-17T17:14:48.267447Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:14:48.267474Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit DropIndexNotice 2025-04-17T17:14:48.267498Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit MoveTable 2025-04-17T17:14:48.267525Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit MoveTable 2025-04-17T17:14:48.267553Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:14:48.267577Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit MoveTable 2025-04-17T17:14:48.267598Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit MoveIndex 2025-04-17T17:14:48.267624Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit MoveIndex 2025-04-17T17:14:48.267653Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:14:48.267677Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit MoveIndex 2025-04-17T17:14:48.267714Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit CreateCdcStream 2025-04-17T17:14:48.267740Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit CreateCdcStream 2025-04-17T17:14:48.267767Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:14:48.267792Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit CreateCdcStream 2025-04-17T17:14:48.267818Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit AlterCdcStream 2025-04-17T17:14:48.267846Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit AlterCdcStream 2025-04-17T17:14:48.267886Z node 13 :TX_DATASHARD TRACE: Execution status 
for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:14:48.267911Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit AlterCdcStream 2025-04-17T17:14:48.267936Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit DropCdcStream 2025-04-17T17:14:48.267964Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit DropCdcStream 2025-04-17T17:14:48.267991Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:14:48.268018Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit DropCdcStream 2025-04-17T17:14:48.268043Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit CreateIncrementalRestoreSrc 2025-04-17T17:14:48.268082Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit CreateIncrementalRestoreSrc 2025-04-17T17:14:48.277905Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:14:48.277995Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit CreateIncrementalRestoreSrc 2025-04-17T17:14:48.278045Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit CompleteOperation 2025-04-17T17:14:48.278088Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit CompleteOperation 2025-04-17T17:14:48.278625Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is DelayComplete 2025-04-17T17:14:48.278696Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit CompleteOperation 2025-04-17T17:14:48.278777Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit CompletedOperations 2025-04-17T17:14:48.278855Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit CompletedOperations 2025-04-17T17:14:48.278902Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:14:48.278930Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit CompletedOperations 2025-04-17T17:14:48.278978Z node 13 :TX_DATASHARD TRACE: Execution plan for [2500:281474976715663] at 72075186224037890 has finished 2025-04-17T17:14:48.279063Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:14:48.279140Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-17T17:14:48.279218Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-17T17:14:48.279281Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-17T17:14:48.292590Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [13:24:2071], Recipient [13:985:2787]: {TEvRegisterTabletResult TabletId# 72075186224037890 Entry# 2000} 2025-04-17T17:14:48.292678Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 
2025-04-17T17:14:48.292749Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 2000 2025-04-17T17:14:48.292828Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T17:14:48.293716Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 2500} 2025-04-17T17:14:48.293843Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T17:14:48.295965Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T17:14:48.296029Z node 13 :TX_DATASHARD TRACE: Complete execution for [2500:281474976715663] at 72075186224037890 on unit CreateTable 2025-04-17T17:14:48.296100Z node 13 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-17T17:14:48.303513Z node 13 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2025-04-17T17:14:48.303593Z node 13 :TX_DATASHARD TRACE: Complete execution for [2500:281474976715663] at 72075186224037890 on unit CompleteOperation 2025-04-17T17:14:48.303723Z node 13 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715663] from 72075186224037890 at tablet 72075186224037890 send result to client [13:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:14:48.303829Z node 13 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715663 state Ready TxInFly 0 2025-04-17T17:14:48.304044Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T17:14:48.306125Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [13:686:2582], Recipient [13:884:2711]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:14:48.306182Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T17:14:48.312286Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [13:686:2582], Recipient [13:667:2571]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:14:48.312355Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:14:48.312705Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [13:686:2582], Recipient [13:985:2787]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:14:48.312741Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-17T17:14:48.313436Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [13:24:2071], Recipient [13:985:2787]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2500 ReadStep# 2500 } 2025-04-17T17:14:48.313497Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-17T17:14:48.313578Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 2500 2025-04-17T17:14:48.314080Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [13:1027:2820], Recipient [13:985:2787]: NKikimr::TEvTabletPipe::TEvClientConnected { 
TabletId: 72057594046644480 Status: OK ServerId: [13:1030:2823] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T17:14:48.314135Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T17:14:48.315631Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [13:410:2405], Recipient [13:985:2787]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715663 2025-04-17T17:14:48.315719Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-17T17:14:48.315800Z node 13 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037890 state Ready 2025-04-17T17:14:48.315908Z node 13 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-04-17T17:14:48.322609Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [13:1045:2832], Recipient [13:985:2787]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:14:48.322711Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:14:48.322796Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [13:1044:2831], serverId# [13:1045:2832], sessionId# [0:0:0] 2025-04-17T17:14:48.322982Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [13:1043:2830], Recipient [13:985:2787]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-17T17:14:48.324747Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:594:2519], Recipient [13:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72075186224037888 TableId: 2 SchemaVersion: 1111 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-17T17:14:48.325113Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:14:48.325256Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-17T17:14:48.325438Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [13:1047:2834], Recipient [13:985:2787]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:14:48.325501Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:14:48.325589Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [13:1046:2833], serverId# [13:1047:2834], sessionId# [0:0:0] >> DataShardReadIterator::ShouldReadRangeChunk3 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk100 >> TableCreation::ConcurrentTableCreationWithDifferentVersions >> TColumnShardTestReadWrite::ReadAggregate [GOOD] >> TTabletPipeTest::TestSendAfterOpen |76.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |76.5%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut >> KqpProxy::PassErrroViaSessionActor >> TPipeTrackerTest::TestShareTablet [GOOD] >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket [GOOD] >> 
TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck >> TPersQueueMirrorer::TestBasicRemote [GOOD] >> TPersQueueMirrorer::ValidStartStream >> TTabletPipeTest::TestSendAfterOpen [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadAggregate [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; 2025-04-17T17:14:44.691151Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:14:44.825190Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:14:44.852486Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:14:44.852875Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:14:44.861270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:14:44.861497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:14:44.861754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:14:44.861906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:14:44.862019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:14:44.862132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:14:44.862254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:14:44.862394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:14:44.862509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:14:44.862615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:14:44.862748Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:14:44.862876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:14:44.893803Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:14:44.893955Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:14:44.894058Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T17:14:44.894268Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:14:44.894475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:14:44.894551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:14:44.894590Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:14:44.894690Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:14:44.894761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:14:44.894802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:14:44.894829Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:14:44.895026Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:14:44.895092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:14:44.895127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:14:44.895164Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:14:44.895248Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:14:44.895295Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:14:44.895332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:14:44.895359Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:14:44.895437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:14:44.895474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:14:44.895501Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:14:44.895556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:14:44.895602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:14:44.895639Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:14:44.896021Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=61; 2025-04-17T17:14:44.896098Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=31; 2025-04-17T17:14:44.896196Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-17T17:14:44.896270Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-04-17T17:14:44.896459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:14:44.896526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:14:44.896564Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:14:44.896748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:14:44.896785Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:14:44.896810Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:14:44.896971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:14:44.897015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:14:44.897040Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T17:14:44.897198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:14:44.897240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:14:44.897284Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T17:14:44.897407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:14:44.897440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:14:44.897490Z node 1 :TX_COLUMNSHARD INFO: tablet_i ... 
UG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T17:14:51.198316Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-17T17:14:51.198354Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-17T17:14:51.198396Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=76; 2025-04-17T17:14:51.198436Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1;merger=0;interval_id=76; 2025-04-17T17:14:51.198471Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-17T17:14:51.198565Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-17T17:14:51.198597Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-04-17T17:14:51.198624Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T17:14:51.199061Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T17:14:51.199180Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-17T17:14:51.250409Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T17:14:51.250604Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;);columns=4;rows=1; 2025-04-17T17:14:51.250703Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=26;num_rows=1;batch_columns=100,101,102,103; 2025-04-17T17:14:51.250827Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[2:434:2452];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2025-04-17T17:14:51.250945Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-17T17:14:51.251062Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-17T17:14:51.251155Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-17T17:14:51.251688Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T17:14:51.251820Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-17T17:14:51.251908Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-17T17:14:51.251939Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: Scan [2:435:2453] finished for tablet 9437184 2025-04-17T17:14:51.252368Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[2:434:2452];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.064}],"full":{"a":1744910091187600,"name":"_full_task","f":1744910091187600,"d_finished":0,"c":0,"l":1744910091251985,"d":64385},"events":[{"name":"bootstrap","f":1744910091187824,"d_finished":2120,"c":1,"l":1744910091189944,"d":2120},{"a":1744910091251468,"name":"ack","f":1744910091199038,"d_finished":52146,"c":1,"l":1744910091251184,"d":52663},{"a":1744910091251454,"name":"processing","f":1744910091191080,"d_finished":57370,"c":10,"l":1744910091251187,"d":57901},{"name":"ProduceResults","f":1744910091189012,"d_finished":53743,"c":13,"l":1744910091251925,"d":53743},{"a":1744910091251927,"name":"Finish","f":1744910091251927,"d_finished":0,"c":0,"l":1744910091251985,"d":58},{"name":"task_result","f":1744910091191096,"d_finished":5098,"c":9,"l":1744910091198664,"d":5098}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-17T17:14:51.252442Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[2:434:2452];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T17:14:51.252763Z node 2 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[2:434:2452];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.064}],"full":{"a":1744910091187600,"name":"_full_task","f":1744910091187600,"d_finished":0,"c":0,"l":1744910091252475,"d":64875},"events":[{"name":"bootstrap","f":1744910091187824,"d_finished":2120,"c":1,"l":1744910091189944,"d":2120},{"a":1744910091251468,"name":"ack","f":1744910091199038,"d_finished":52146,"c":1,"l":1744910091251184,"d":53153},{"a":1744910091251454,"name":"processing","f":1744910091191080,"d_finished":57370,"c":10,"l":1744910091251187,"d":58391},{"name":"ProduceResults","f":1744910091189012,"d_finished":53743,"c":13,"l":1744910091251925,"d":53743},{"a":1744910091251927,"name":"Finish","f":1744910091251927,"d_finished":0,"c":0,"l":1744910091252475,"d":548},{"name":"task_result","f":1744910091191096,"d_finished":5098,"c":9,"l":1744910091198664,"d":5098}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-17T17:14:51.252845Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T17:14:51.187137Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=16001;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=16001;selected_rows=0; 2025-04-17T17:14:51.252877Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T17:14:51.253099Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;; >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] >> TTabletPipeTest::TestConsumerSidePipeReset |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpen [GOOD] >> TTabletPipeTest::TestConsumerSidePipeReset [GOOD] >> TTabletPipeTest::TestInterconnectSession |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite [GOOD] >> 
DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite >> TableCreation::MultipleTablesCreation [GOOD] >> TableCreation::CreateOldTable >> KqpSnapshotRead::TestReadOnly-withSink [GOOD] >> KqpSnapshotRead::TestSnapshotExpiration+withSink >> TTabletPipeTest::TestInterconnectSession [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] |76.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |76.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestInterconnectSession [GOOD] >> TTabletPipeTest::TestSendAfterReboot >> LabeledDbCounters::OneTabletRemoveCounters [GOOD] >> LabeledDbCounters::OneTabletRestart >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite |76.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |76.5%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader >> SystemView::AuthPermissions [GOOD] >> SystemView::AuthPermissions_Access ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] Test command err: 2025-04-17T17:14:42.994986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:14:42.997556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:14:42.997654Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00137a/r3tmp/tmpqFFSu8/pdisk_1.dat 2025-04-17T17:14:43.580373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:14:43.604306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:14:43.605814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:14:43.610804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T17:14:43.625627Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-04-17T17:14:43.625696Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:409:2404] Proxy marker# C1 2025-04-17T17:14:43.684681Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:43.691811Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Merged config: { } 2025-04-17T17:14:43.733045Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:333:2374] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-04-17T17:14:43.734839Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2025-04-17T17:14:43.735024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:43.735073Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-17T17:14:43.735106Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-17T17:14:43.735149Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-17T17:14:43.735185Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-17T17:14:43.735400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-17T17:14:43.735613Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-17T17:14:43.735662Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-17T17:14:43.735715Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-04-17T17:14:43.735752Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-17T17:14:43.735930Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2025-04-17T17:14:43.746619Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2025-04-17T17:14:43.746698Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:333:2374]) 2025-04-17T17:14:43.746783Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-17T17:14:43.747490Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2025-04-17T17:14:43.747594Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:333:2374])::Execute 2025-04-17T17:14:43.748434Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-17T17:14:43.748540Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:333:2374])::Complete 2025-04-17T17:14:43.758874Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 270443339776 } 2025-04-17T17:14:43.759049Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2025-04-17T17:14:43.759125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:14:43.772495Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2025-04-17T17:14:43.772613Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-17T17:14:43.772655Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-17T17:14:43.772867Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-17T17:14:43.772899Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-17T17:14:43.772923Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-04-17T17:14:43.772960Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-17T17:14:43.785331Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2025-04-17T17:14:43.785385Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-17T17:14:43.857128Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2025-04-17T17:14:43.857204Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-04-17T17:14:43.857429Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-04-17T17:14:43.857685Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-04-17T17:14:43.857732Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:409:2404] Proxy 2025-04-17T17:14:43.858306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 
2025-04-17T17:14:43.859530Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-17T17:14:43.859629Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-04-17T17:14:43.859668Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-04-17T17:14:43.859720Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2025-04-17T17:14:43.859960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:14:43.860027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-04-17T17:14:43.860924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-04-17T17:14:43.940354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:14:43.976063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:14:43.976158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:14:43.976904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2025-04-17T17:14:43.982455Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2025-04-17T17:14:44.024217Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-17T17:14:44.024322Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-04-17T17:14:44.038115Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2025-04-17T17:14:44.038236Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {} 2025-04-17T17:14:44.038302Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-04-17T17:14:44.049657Z node 1 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2025-04-17T17:14:44.050434Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-04-17T17:14:44.050586Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 
2025-04-17T17:14:44.051149Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2025-04-17T17:14:44.051463Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2025-04-17T17:14:44.052211Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2025-04-17T17:14:44.052294Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2025-04-17T17:14:44.052446Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2025-04-17T17:14:44.052528Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}: tablet 72075186224037888 channel 2 assigned to group 21810380 ... TxBlockStorageResult::Execute(72075186224037888 OK) 2025-04-17T17:14:53.969325Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037888 OK) 2025-04-17T17:14:53.970079Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2025-04-17T17:14:53.970384Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-17T17:14:53.970754Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2025-04-17T17:14:53.970805Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-17T17:14:53.970956Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2025-04-17T17:14:53.985760Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-17T17:14:53.987239Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 HANDLE EvProposeTransaction marker# C0 2025-04-17T17:14:53.987315Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 step# 3500 Status# 16 SEND to# [2:409:2404] Proxy marker# C1 2025-04-17T17:14:54.000893Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2025-04-17T17:14:54.087283Z node 2 :TX_COORDINATOR DEBUG: Transaction 281474976715665 has been planned 2025-04-17T17:14:54.087407Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 2025-04-17T17:14:54.087460Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 2025-04-17T17:14:54.087774Z node 2 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 4000 in 0.500000s at 3.950000s 2025-04-17T17:14:54.088316Z node 2 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 3500, txid# 281474976715665 marker# C2 2025-04-17T17:14:54.088403Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 stepId# 3500 
Status# 17 SEND EvProposeTransactionStatus to# [2:409:2404] Proxy 2025-04-17T17:14:54.089109Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715665 at step 3500 at tablet 72075186224037889 { Transactions { TxId: 281474976715665 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-17T17:14:54.089166Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:14:54.089343Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 3500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:14:54.089730Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:14:54.089773Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:14:54.089818Z node 2 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715665] in PlanQueue unit at 72075186224037889 2025-04-17T17:14:54.090031Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715665 keys extracted: 0 2025-04-17T17:14:54.090142Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T17:14:54.090344Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:14:54.090416Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037889 2025-04-17T17:14:54.090799Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:14:54.092040Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-04-17T17:14:54.100339Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T17:14:54.101722Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T17:14:54.101818Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:14:54.101884Z node 2 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715665 state PreOffline TxInFly 0 2025-04-17T17:14:54.101994Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:14:54.102103Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-17T17:14:54.102227Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-04-17T17:14:54.102276Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2025-04-17T17:14:54.102310Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715665 for mediator 72057594046382081 acknowledged 2025-04-17T17:14:54.102359Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715665 acknowledged 2025-04-17T17:14:54.103327Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715665, done: 0, blocked: 1 2025-04-17T17:14:54.111084Z node 2 
:TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715665 datashard 72075186224037889 state PreOffline 2025-04-17T17:14:54.111201Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-17T17:14:54.111829Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2025-04-17T17:14:54.111950Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 2, subscribers: 1 2025-04-17T17:14:54.119101Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 1 2025-04-17T17:14:54.119651Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T17:14:54.120966Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Y2U0ODQ4Y2QtNzk1MTM4ZmYtNjAyOWI0NzItY2NmNDI0YWI= 2025-04-17 17:14:54.120 INFO ydb-core-tx-datashard-ut_minstep(pid=66638, tid=0x00007FF589938D00) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2025-04-17T17:14:54.121098Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Y2U0ODQ4Y2QtNzk1MTM4ZmYtNjAyOWI0NzItY2NmNDI0YWI= 2025-04-17 17:14:54.121 INFO ydb-core-tx-datashard-ut_minstep(pid=66638, tid=0x00007FF589938D00) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2025-04-17T17:14:54.121184Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Y2U0ODQ4Y2QtNzk1MTM4ZmYtNjAyOWI0NzItY2NmNDI0YWI= 2025-04-17 17:14:54.121 INFO ydb-core-tx-datashard-ut_minstep(pid=66638, tid=0x00007FF589938D00) [core exec] yql_execution.cpp:59: Begin, root #43 2025-04-17T17:14:54.121241Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Y2U0ODQ4Y2QtNzk1MTM4ZmYtNjAyOWI0NzItY2NmNDI0YWI= 2025-04-17 17:14:54.121 INFO ydb-core-tx-datashard-ut_minstep(pid=66638, tid=0x00007FF589938D00) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-04-17T17:14:54.121297Z node 2 :KQP_YQL TRACE: SessionId: ydb://session/3?node_id=2&id=Y2U0ODQ4Y2QtNzk1MTM4ZmYtNjAyOWI0NzItY2NmNDI0YWI= 2025-04-17 17:14:54.121 TRACE ydb-core-tx-datashard-ut_minstep(pid=66638, tid=0x00007FF589938D00) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-04-17T17:14:54.121389Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Y2U0ODQ4Y2QtNzk1MTM4ZmYtNjAyOWI0NzItY2NmNDI0YWI= 2025-04-17 17:14:54.121 INFO ydb-core-tx-datashard-ut_minstep(pid=66638, tid=0x00007FF589938D00) [core exec] yql_execution.cpp:577: Node #43 finished execution 2025-04-17T17:14:54.121472Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Y2U0ODQ4Y2QtNzk1MTM4ZmYtNjAyOWI0NzItY2NmNDI0YWI= 2025-04-17 17:14:54.121 INFO ydb-core-tx-datashard-ut_minstep(pid=66638, tid=0x00007FF589938D00) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2025-04-17T17:14:54.121529Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Y2U0ODQ4Y2QtNzk1MTM4ZmYtNjAyOWI0NzItY2NmNDI0YWI= 2025-04-17 17:14:54.121 INFO ydb-core-tx-datashard-ut_minstep(pid=66638, tid=0x00007FF589938D00) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2025-04-17T17:14:54.121586Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Y2U0ODQ4Y2QtNzk1MTM4ZmYtNjAyOWI0NzItY2NmNDI0YWI= 2025-04-17 17:14:54.121 INFO ydb-core-tx-datashard-ut_minstep(pid=66638, 
tid=0x00007FF589938D00) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2025-04-17T17:14:54.121790Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=Y2U0ODQ4Y2QtNzk1MTM4ZmYtNjAyOWI0NzItY2NmNDI0YWI= 2025-04-17 17:14:54.121 NOTE ydb-core-tx-datashard-ut_minstep(pid=66638, tid=0x00007FF589938D00) [common provider] yql_provider_gateway.cpp:21:
<main>: Info: Execution, code: 1060 2025-04-17T17:14:54.121853Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=Y2U0ODQ4Y2QtNzk1MTM4ZmYtNjAyOWI0NzItY2NmNDI0YWI= 2025-04-17 17:14:54.121 NOTE ydb-core-tx-datashard-ut_minstep(pid=66638, tid=0x00007FF589938D00) [common provider] yql_provider_gateway.cpp:21:
<main>:1:12: Info: Executing DROP TABLE 2025-04-17T17:14:54.121899Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=Y2U0ODQ4Y2QtNzk1MTM4ZmYtNjAyOWI0NzItY2NmNDI0YWI= 2025-04-17 17:14:54.121 NOTE ydb-core-tx-datashard-ut_minstep(pid=66638, tid=0x00007FF589938D00) [common provider] yql_provider_gateway.cpp:21:
: Info: Success, code: 4 2025-04-17T17:14:54.146031Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-17T17:14:54.146306Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-04-17T17:14:54.148311Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-04-17T17:14:54.149350Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-17T17:14:54.149840Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186224037889 2025-04-17T17:14:54.149904Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2025-04-17T17:14:54.150019Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2025-04-17T17:14:54.150153Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2025-04-17T17:14:54.156938Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 >> TTabletPipeTest::TestSendAfterReboot [GOOD] >> PartitionEndWatcher::EmptyPartition [GOOD] >> PartitionEndWatcher::AfterCommit [GOOD] >> YdbProxy::AlterTable >> TConsoleTests::TestCreateSubSubDomain [GOOD] >> TConsoleTests::TestCreateSubSubDomainExtSubdomain >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict-UseSink [GOOD] >> Cdc::NaN[YdsRunner] [GOOD] >> Cdc::NaN[TopicRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterReboot [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] Leader for TabletID 9437185 is [0:0:0] sender: [1:109:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:109:2057] recipient: [1:105:2138] Leader for TabletID 9437184 is [1:116:2145] sender: [1:117:2057] recipient: [1:104:2137] Leader for TabletID 9437185 is [1:119:2147] sender: [1:121:2057] recipient: [1:105:2138] Leader for TabletID 9437184 is [1:116:2145] sender: [1:156:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:119:2147] sender: [1:158:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:119:2147] sender: [1:161:2057] recipient: [1:101:2136] Leader for TabletID 9437185 is [1:119:2147] sender: [1:163:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:119:2147] sender: [1:165:2057] recipient: [1:164:2176] Leader for TabletID 9437185 is [1:166:2177] sender: [1:167:2057] recipient: [1:164:2176] Leader for TabletID 9437185 is [1:166:2177] sender: [1:195:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:116:2145] sender: [1:198:2057] recipient: [1:100:2135] Leader for TabletID 9437184 is [1:116:2145] sender: [1:201:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:116:2145] sender: [1:202:2057] recipient: [1:200:2200] Leader for TabletID 9437184 is [1:203:2201] sender: [1:204:2057] recipient: [1:200:2200] Leader 
for TabletID 9437184 is [1:203:2201] sender: [1:232:2057] recipient: [1:14:2061] >> KqpProxy::PassErrroViaSessionActor [GOOD] >> KqpProxy::NodeDisconnectedTest >> KqpProxy::NoLocalSessionExecution [GOOD] >> KqpProxy::NoUserAccessToScriptExecutionsTable >> YdbProxy::CopyTable >> SystemView::AuthEffectivePermissions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict-UseSink [GOOD] Test command err: 2025-04-17T17:13:09.757331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:13:09.758008Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:13:09.758076Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00111c/r3tmp/tmpFdk20z/pdisk_1.dat 2025-04-17T17:13:10.162423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:13:10.198004Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:10.241341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:10.241470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:10.252791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:13:10.341561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:13:10.380600Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:13:10.381645Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:13:10.382035Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:13:10.382256Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:10.436333Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:13:10.436960Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:10.437054Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:13:10.438213Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:13:10.438264Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:13:10.438295Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:13:10.438512Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:13:10.438602Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:13:10.438653Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:13:10.449723Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:13:10.471125Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:13:10.471318Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:13:10.471438Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:13:10.471471Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:13:10.471505Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:13:10.471544Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:13:10.471750Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:10.471792Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:10.472013Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:13:10.472095Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:13:10.472224Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:13:10.472270Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:13:10.472310Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:13:10.472341Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:13:10.472394Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:13:10.472426Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:13:10.472468Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:13:10.472613Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:10.472647Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:10.472686Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:13:10.473054Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:13:10.473111Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:13:10.473202Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:13:10.473421Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:13:10.473473Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:13:10.473540Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:13:10.473581Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:13:10.473612Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:13:10.473641Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:13:10.473667Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:13:10.473885Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:13:10.473920Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:13:10.473954Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:13:10.473993Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:13:10.474036Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:13:10.474082Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:13:10.474111Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:13:10.474136Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:13:10.474156Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:13:10.475404Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:13:10.475450Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:13:10.488718Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:13:10.488785Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:13:10.488811Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:13:10.488851Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:13:10.488918Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:13:10.673909Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:10.673956Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:10.674319Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:13:10.675868Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:13:10.675904Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:13:10.676012Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:13:10.676051Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:13:10.676091Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:13:10.684242Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:13:10.714796Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:13:10.714862Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:13:10.733457Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:10.733504Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:10.733555Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:13:1 ... teState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2025-04-17T17:14:55.489591Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3001} 2025-04-17T17:14:55.489642Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:14:55.489673Z node 15 :TX_DATASHARD TRACE: Complete execution for [3001:281474976715667] at 72075186224037888 on unit CompleteOperation 2025-04-17T17:14:55.489716Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:14:55.490926Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [15:1167:2935], Recipient [15:667:2571]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:14:55.490969Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:14:55.491006Z node 15 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [15:1166:2934], serverId# [15:1167:2935], sessionId# [0:0:0] 2025-04-17T17:14:55.491830Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:594:2519], Recipient [15:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 KeysSize: 6 2025-04-17T17:14:55.492074Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:14:55.492387Z node 15 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3001/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-04-17T17:14:55.492471Z node 15 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v3001/18446744073709551615 2025-04-17T17:14:55.492582Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-04-17T17:14:55.492754Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-17T17:14:55.492827Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 
executing on unit CheckRead 2025-04-17T17:14:55.492889Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:14:55.492942Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:14:55.493008Z node 15 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-04-17T17:14:55.493078Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-17T17:14:55.493105Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:14:55.493126Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T17:14:55.493152Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:14:55.493309Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 } 2025-04-17T17:14:55.493768Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Continue 2025-04-17T17:14:55.493816Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Continue at tablet# 72075186224037888 2025-04-17T17:14:55.493908Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-17T17:14:55.519155Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [15:1061:2855], Recipient [15:667:2571]: {TEvReadSet step# 3001 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037891 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-17T17:14:55.519284Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-17T17:14:55.519361Z node 15 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037891 dest 72075186224037888 producer 72075186224037891 txId 281474976715667 2025-04-17T17:14:55.519580Z node 15 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3001 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037891 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-17T17:14:55.519817Z node 15 :TX_DATASHARD DEBUG: Complete [3001 : 281474976715667] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1156:2910], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:14:55.519918Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:14:55.519988Z node 15 :TX_DATASHARD DEBUG: Found ready candidate operation [0:4] at 72075186224037888 for ExecuteRead 2025-04-17T17:14:55.520426Z node 15 :KQP_EXECUTER DEBUG: ActorId: [15:1156:2910] TxId: 281474976715667. Ctx: { TraceId: 01js2by4rzaytvwqq0tvtv63dq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=ZDQzZjJjYWItYjM3MjU5MGUtODEyOTU2MTItODg5MWUzYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2025-04-17T17:14:55.520671Z node 15 :KQP_EXECUTER DEBUG: ActorId: [15:1156:2910] TxId: 281474976715667. 
Ctx: { TraceId: 01js2by4rzaytvwqq0tvtv63dq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=ZDQzZjJjYWItYjM3MjU5MGUtODEyOTU2MTItODg5MWUzYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T17:14:55.520770Z node 15 :KQP_EXECUTER DEBUG: ActorId: [15:1156:2910] TxId: 281474976715667. Ctx: { TraceId: 01js2by4rzaytvwqq0tvtv63dq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=ZDQzZjJjYWItYjM3MjU5MGUtODEyOTU2MTItODg5MWUzYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-17T17:14:55.520992Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:667:2571], Recipient [15:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:14:55.521080Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:14:55.522225Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:14:55.522560Z node 15 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [15:594:2519], selfId: [15:57:2104], source: [15:1134:2910] 2025-04-17T17:14:55.522944Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:14:55.523027Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:14:55.523102Z node 15 :TX_DATASHARD DEBUG: Return cached ready operation [0:4] at 72075186224037888 2025-04-17T17:14:55.523169Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:14:55.523368Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 2, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 } 2025-04-17T17:14:55.525472Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2025-04-17T17:14:55.525589Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:594:2519], 1} after executionsCount# 2 2025-04-17T17:14:55.525750Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551583, hasUnreadQueries# 1, total queries# 6, firstUnprocessed# 0 2025-04-17T17:14:55.526291Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-17T17:14:55.526369Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:14:55.526444Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:14:55.526500Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:14:55.526558Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-17T17:14:55.526585Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:14:55.526631Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:4] at 
72075186224037888 has finished 2025-04-17T17:14:55.526696Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:14:55.526781Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:14:55.526844Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:14:55.526905Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:14:55.527307Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:667:2571], Recipient [15:667:2571]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:14:55.527418Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 2 2025-04-17T17:14:55.527713Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 2 2025-04-17T17:14:55.527935Z node 15 :TX_DATASHARD TRACE: 72075186224037888 readContinue iterator# {[15:594:2519], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551611, quota bytes left# 18446744073709551551, hasUnreadQueries# 1, total queries# 6, firstUnprocessed# 2 2025-04-17T17:14:55.529052Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:667:2571], Recipient [15:667:2571]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:14:55.529134Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 4 2025-04-17T17:14:55.529296Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 4 2025-04-17T17:14:55.529421Z node 15 :TX_DATASHARD TRACE: 72075186224037888 readContinue iterator# {[15:594:2519], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551609, quota bytes left# 18446744073709551519, hasUnreadQueries# 0, total queries# 6, firstUnprocessed# 4 2025-04-17T17:14:55.529569Z node 15 :TX_DATASHARD DEBUG: 72075186224037888 read iterator# {[15:594:2519], 1} finished in ReadContinue 2025-04-17T17:14:55.529832Z node 15 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=15&id=ZDQzZjJjYWItYjM3MjU5MGUtODEyOTU2MTItODg5MWUzYTU=, workerId: [15:1134:2910], local sessions count: 0 2025-04-17T17:14:55.530065Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [15:61:2108], Recipient [15:1061:2855]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 15 Status: STATUS_NOT_FOUND >> TColumnShardTestReadWrite::WriteOverload+InStore+WithWritePortionsOnInsert [GOOD] >> TableCreation::ConcurrentTableCreationWithDifferentVersions [GOOD] >> TableCreation::ConcurrentUpdateTable >> YdbProxy::MakeDirectory >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload+InStore+WithWritePortionsOnInsert [GOOD] Test command err: 2025-04-17T17:14:44.610890Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:14:44.846812Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:14:44.871031Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:14:44.871324Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:14:44.880372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:14:44.880574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:14:44.880786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:14:44.880908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:14:44.881046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:14:44.881140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:14:44.881245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:14:44.881364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:14:44.881871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:14:44.881998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:14:44.882105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:14:44.882197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:14:44.909262Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:14:44.909437Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:14:44.909504Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
2025-04-17T17:14:44.909701Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:14:44.909860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:14:44.909936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:14:44.909979Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:14:44.910050Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:14:44.910108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:14:44.910142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:14:44.910165Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:14:44.910336Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:14:44.910395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:14:44.910431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:14:44.910461Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:14:44.910537Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:14:44.910596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:14:44.910654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:14:44.910681Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:14:44.910760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:14:44.910798Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:14:44.910828Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:14:44.910881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:14:44.910924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:14:44.910956Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:14:44.911319Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=65; 2025-04-17T17:14:44.911393Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2025-04-17T17:14:44.911462Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=29; 2025-04-17T17:14:44.911544Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=47; 2025-04-17T17:14:44.911750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:14:44.911818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:14:44.911858Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:14:44.912072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:14:44.912143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:14:44.912174Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:14:44.912335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:14:44.912385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:14:44.912416Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T17:14:44.912565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:14:44.912597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:14:44.912619Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T17:14:44.984419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:14:44.984502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:14:44.984562Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... tablet_id=9437184; 2025-04-17T17:14:45.282264Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T17:14:45.282298Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-17T17:14:45.287355Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T17:14:45.287453Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T17:14:45.287509Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:521;problem=Background activities cannot be started: no index at tablet; 2025-04-17T17:14:45.610548Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=88923003853600;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1744910085804;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;fline=schema.h:36;event=sync_schema; 2025-04-17T17:14:45.614461Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1744910085804;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923003853600;op_tx=10:TX_KIND_SCHEMA;min=1744910085804;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1744910085804;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881023040;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2025-04-17T17:14:45.614541Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1744910085804;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923003853600;op_tx=10:TX_KIND_SCHEMA;min=1744910085804;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1744910085804;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881023040;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:99:2134]; 2025-04-17T17:14:45.614594Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1744910085804;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923003853600;op_tx=10:TX_KIND_SCHEMA;min=1744910085804;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1744910085804;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881023040;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2025-04-17T17:14:45.614900Z node 1 :TX_COLUMNSHARD DEBUG: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-04-17T17:14:45.615084Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1744910085804 at tablet 9437184, mediator 0 2025-04-17T17:14:45.615130Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] execute at tablet 9437184 2025-04-17T17:14:45.615502Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 1 ttl settings: { Version: 1 } at tablet 9437184 2025-04-17T17:14:45.631736Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-17T17:14:45.631912Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:245;method=RegisterTable;path_id=1; 2025-04-17T17:14:45.631982Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:144;event=RegisterTable;path_id=1; 2025-04-17T17:14:45.653302Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:488;event=OnTieringModified;path_id=1; 2025-04-17T17:14:45.653512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tx_controller.cpp:214;event=finished_tx;tx_id=10; 2025-04-17T17:14:45.686436Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2025-04-17T17:14:45.686638Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6330728;columns=10; CATCH TEvWritePortionResult, status OK 2025-04-17T17:14:56.292459Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=columnshard__write.cpp:96;event=shard_overload;reason=writes_size_in_fly;sum=132945288;limit=128000000; 2025-04-17T17:14:56.294625Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_OVERLOADED;details=overload data error;tx_id=0; CATCH TEvWritePortionResult, status OK CATCH 
TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK 2025-04-17T17:14:57.538992Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-17T17:14:57.563383Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-17T17:14:57.574374Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-04-17T17:14:57.589312Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-04-17T17:14:57.591954Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2025-04-17T17:14:57.605896Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2025-04-17T17:14:57.608451Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-04-17T17:14:57.622361Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-04-17T17:14:57.635616Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-04-17T17:14:57.651129Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-04-17T17:14:57.653988Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-17T17:14:57.668987Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-17T17:14:57.671895Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-17T17:14:57.686658Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-17T17:14:57.700218Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-17T17:14:57.714188Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-17T17:14:57.717028Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-17T17:14:57.736903Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-17T17:14:57.739654Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-17T17:14:57.756800Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-17T17:14:57.774388Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 2025-04-17T17:14:57.793021Z node 1 
:TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 2025-04-17T17:14:57.795718Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-17T17:14:57.810257Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-17T17:14:57.813024Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-17T17:14:57.826523Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-17T17:14:57.837798Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-17T17:14:57.853689Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-17T17:14:57.856448Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 1 2025-04-17T17:14:57.870078Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 1 2025-04-17T17:14:57.873283Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-17T17:14:57.886245Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-17T17:14:57.904358Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-17T17:14:57.918258Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-17T17:14:57.921141Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-04-17T17:14:57.933617Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-04-17T17:14:57.937739Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-17T17:14:57.952964Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-17T17:14:57.965754Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-04-17T17:14:57.978525Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-04-17T17:14:57.981455Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-17T17:14:57.997876Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-17T17:14:58.420646Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 2025-04-17T17:14:58.437674Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: 
[1:108:2057] recipient: [1:101:2135] 2025-04-17T17:13:35.041036Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:13:35.041152Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-04-17T17:13:35.062673Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:13:35.080603Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-17T17:13:35.081674Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-17T17:13:35.084069Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-04-17T17:13:35.086522Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-04-17T17:13:35.087716Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-04-17T17:13:35.093664Z node 1 :PERSQUEUE INFO: new Cookie default|82fb9560-859afc96-1f6dab5a-57e696a0_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:13:35.099484Z node 1 :PERSQUEUE INFO: new Cookie default|4f2a6df0-b2b66e9f-6b840e8a-85d747fc_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:13:35.123171Z node 1 :PERSQUEUE INFO: new Cookie default|19805e44-bd1744d2-c90bc4c8-400b9a50_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:13:35.131445Z node 1 :PERSQUEUE INFO: new Cookie default|712cf8ba-148c8227-1606768a-b5fe06d6_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:13:35.138605Z node 1 :PERSQUEUE INFO: new Cookie default|2ba87c47-a1105773-8285bbd6-dd568b11_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:13:35.146270Z node 1 :PERSQUEUE INFO: new Cookie default|ae8748ea-b80ba52d-c2270b64-e999d493_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: 
[2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-04-17T17:13:35.942963Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:13:35.943042Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:177:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:107:2139]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:179:2057] recipient: [2:99:2134] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:182:2057] recipient: [2:181:2193] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:183:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:184:2194] sender: [2:185:2057] recipient: [2:181:2193] 2025-04-17T17:13:35.988553Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:13:35.988632Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:107:2139]) rebooted! !Reboot 72057594037927937 (actor [2:107:2139]) tablet resolver refreshed! new actor is[2:184:2194] Leader for TabletID 72057594037927937 is [2:184:2194] sender: [2:258:2057] recipient: [2:14:2061] 2025-04-17T17:13:37.826420Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:13:37.827144Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-17T17:13:37.828234Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:264:2256] 2025-04-17T17:13:37.829906Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:264:2256] 2025-04-17T17:13:37.831797Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:265:2257] 2025-04-17T17:13:37.833034Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:265:2257] 2025-04-17T17:13:37.861340Z node 2 :PERSQUEUE INFO: new Cookie default|144be49b-ac6719da-f0adb154-d53db980_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner 
default 2025-04-17T17:13:37.878761Z node 2 :PERSQUEUE INFO: new Cookie default|b6b5e958-a249f145-37963ca2-cc9da4cf_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:13:37.916431Z node 2 :PERSQUEUE INFO: new Cookie default|cbcbfb97-bf1a64a3-26e4f4c0-d89b5731_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:13:37.927158Z node 2 :PERSQUEUE INFO: new Cookie default|28a6c15-641a71bb-960bb76a-e9ad7984_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:13:37.937303Z node 2 :PERSQUEUE INFO: new Cookie default|567f1135-32bda981-764f7fd8-f4f80e20_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:13:37.947666Z node 2 :PERSQUEUE INFO: new Cookie default|ce419bce-6ee8eefe-c2ccd897-f82040d9_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-04-17T17:13:38.726032Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:13:38.726118Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:107:2139]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:181:2057] recipient: [3:99:2134] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:184:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:185:2057] recipient: [3:183:2195] Leader for TabletID 72057594037927937 is [3:186:2196] sender: [3:187:2057] recipient: [3:183:2195] 2025-04-17T17:13:38.808223Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:13:38.808309Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [3:107:2139]) rebooted! !Reboot 72057594037927937 (actor [3:107:2139]) tablet resolver refreshed! 
new actor is[3:186:2196] Leader for TabletID 72057594037927937 is [3:186:2196] sender: [3:260:2057] recipient: [3:14:2061] 2025-04-17T17:13:40.630109Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:13:40.631085Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-04-17T17:13:40.632080Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:266:2258] 2025-04-17T17:13:40.634700Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [3:266:2258] 2025-04-17T17:13:40.640908Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:267:2259] 2025-04-17T17:13:40.643084Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [3:267:2259] 2025-04-17T17:13:40.662249Z node 3 :PERSQUEUE INFO: new Cookie default|4909fb29-2242e4d0-5e44341 ... 
765490Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 47 actor [47:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 47 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 47 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 47 Important: false } 2025-04-17T17:14:56.766473Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [47:185:2198] 2025-04-17T17:14:56.769191Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [47:185:2198] 2025-04-17T17:14:56.770832Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [47:186:2199] 2025-04-17T17:14:56.773104Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [47:186:2199] 2025-04-17T17:14:56.783174Z node 47 :PERSQUEUE INFO: new Cookie default|60d6549a-e1af5a62-7271e2fa-3ac74366_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:14:56.789481Z node 47 :PERSQUEUE INFO: new Cookie default|664ec29d-208e604f-cf0e93eb-9996fee7_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:14:56.825186Z node 47 :PERSQUEUE INFO: new Cookie default|1db24eb-d32c9388-b369fb33-bff7572f_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:14:56.836041Z node 47 :PERSQUEUE INFO: new Cookie default|aad7c860-778bd31-20101ca1-1034b975_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:14:56.845734Z node 47 :PERSQUEUE INFO: new Cookie default|865c2962-12684445-2f6b6faa-45ee0069_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:14:56.858795Z node 47 :PERSQUEUE INFO: new Cookie default|7723cdf9-6bb7b301-126a05a7-fb6754ef_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default !Reboot 72057594037927937 (actor [47:107:2139]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [47:107:2139] sender: [47:284:2057] recipient: [47:99:2134] Leader for TabletID 72057594037927937 is [47:107:2139] sender: [47:286:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [47:107:2139] sender: [47:288:2057] recipient: [47:287:2281] Leader for TabletID 72057594037927937 is [47:289:2282] sender: [47:290:2057] recipient: [47:287:2281] 2025-04-17T17:14:56.919674Z node 47 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:14:56.919761Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:14:56.920925Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [47:338:2323] 2025-04-17T17:14:56.923590Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [47:339:2324] 2025-04-17T17:14:56.935395Z node 47 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:14:56.935478Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [47:339:2324] 2025-04-17T17:14:56.938241Z node 47 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:14:56.938323Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [47:338:2323] !Reboot 72057594037927937 (actor [47:107:2139]) rebooted! !Reboot 72057594037927937 (actor [47:107:2139]) tablet resolver refreshed! 
new actor is[47:289:2282]
Leader for TabletID 72057594037927937 is [47:289:2282] sender: [47:386:2057] recipient: [47:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:103:2057] recipient: [48:101:2135]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:103:2057] recipient: [48:101:2135]
Leader for TabletID 72057594037927937 is [48:107:2139] sender: [48:108:2057] recipient: [48:101:2135]
2025-04-17T17:14:58.940671Z node 48 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-17T17:14:58.940751Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info
Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:149:2057] recipient: [48:147:2170]
IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:149:2057] recipient: [48:147:2170]
Leader for TabletID 72057594037927938 is [48:153:2174] sender: [48:154:2057] recipient: [48:147:2170]
Leader for TabletID 72057594037927937 is [48:107:2139] sender: [48:179:2057] recipient: [48:14:2061]
2025-04-17T17:14:58.962655Z node 48 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-17T17:14:58.963576Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 48 actor [48:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 48 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 48 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 48 Important: false }
2025-04-17T17:14:58.964583Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [48:185:2198]
2025-04-17T17:14:58.967382Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [48:185:2198]
2025-04-17T17:14:58.969531Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [48:186:2199]
2025-04-17T17:14:58.971566Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [48:186:2199]
2025-04-17T17:14:58.988767Z node 48 :PERSQUEUE INFO: new Cookie default|baed83e3-116e3fe-aa113fa4-c917896_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default
2025-04-17T17:14:58.999101Z node 48 :PERSQUEUE INFO: new Cookie default|383350b9-5139ca41-6d82a309-7ea80f73_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default
2025-04-17T17:14:59.062196Z node 48 :PERSQUEUE INFO: new Cookie default|84a10e25-32e0a052-c5a63183-d926eb2a_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default
2025-04-17T17:14:59.076185Z node 48 :PERSQUEUE INFO: new Cookie default|921fbe18-6b722e62-12ee134e-69caebea_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default
2025-04-17T17:14:59.097714Z node 48 :PERSQUEUE INFO: new Cookie default|724ba424-b4206320-3e94c5e3-297e829f_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default
2025-04-17T17:14:59.127383Z node 48 :PERSQUEUE INFO: new Cookie default|64f87c30-a0df85f5-e8c39b85-8b11b900_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default
Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:103:2057] recipient: [49:101:2135]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:103:2057] recipient: [49:101:2135]
Leader for TabletID 72057594037927937 is [49:107:2139] sender: [49:108:2057] recipient: [49:101:2135]
2025-04-17T17:14:59.867926Z node 49 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-17T17:14:59.868007Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info
Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:149:2057] recipient: [49:147:2170]
IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:149:2057] recipient: [49:147:2170]
Leader for TabletID 72057594037927938 is [49:153:2174] sender: [49:154:2057] recipient: [49:147:2170]
Leader for TabletID 72057594037927937 is [49:107:2139] sender: [49:177:2057] recipient: [49:14:2061]
2025-04-17T17:14:59.895418Z node 49 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-17T17:14:59.896451Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 49 actor [49:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 49 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 49 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 49 Important: false }
2025-04-17T17:14:59.897530Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [49:183:2196]
2025-04-17T17:14:59.901336Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [49:183:2196]
2025-04-17T17:14:59.903748Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [49:184:2197]
2025-04-17T17:14:59.912259Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [49:184:2197]
2025-04-17T17:14:59.926240Z node 49 :PERSQUEUE INFO: new Cookie default|b10179c7-af8f9b54-2934319e-8cb0f7ca_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default
2025-04-17T17:14:59.934713Z node 49 :PERSQUEUE INFO: new Cookie default|8677fad2-6dd78641-8a71fec6-5e5a3ebe_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default
2025-04-17T17:14:59.979181Z node 49 :PERSQUEUE INFO: new Cookie default|a6972e96-f1915ea5-ae3ab37a-1222363c_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default
2025-04-17T17:14:59.990301Z node 49 :PERSQUEUE INFO: new Cookie default|63a1e18a-b51b63e1-3d2b1426-cbb80e25_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default
2025-04-17T17:15:00.001463Z node 49 :PERSQUEUE INFO: new Cookie default|6f074f40-744e04ed-516dd456-a285f7ae_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default
2025-04-17T17:15:00.014140Z node 49 :PERSQUEUE INFO: new Cookie default|bd71eb97-b442211-159ee32b-84657e02_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default
>> TableCreation::CreateOldTable [GOOD]
>> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckSessionResetAfterCommit [GOOD]
>> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic
>> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize [GOOD]
>> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD]
Test command err:
2025-04-17T17:14:42.994683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-17T17:14:42.998546Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:14:42.998650Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00124f/r3tmp/tmp1md5VD/pdisk_1.dat
2025-04-17T17:14:43.580403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480
2025-04-17T17:14:43.591971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480
2025-04-17T17:14:43.593621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-17T17:14:43.610806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root
2025-04-17T17:14:43.626578Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0
2025-04-17T17:14:43.626645Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:409:2404] Proxy marker# C1
2025-04-17T17:14:43.687547Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:14:43.699114Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Merged config: { }
2025-04-17T17:14:43.745054Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:333:2374] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 }
2025-04-17T17:14:43.745227Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute
2025-04-17T17:14:43.745400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:14:43.745453Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0)
2025-04-17T17:14:43.745491Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0)
2025-04-17T17:14:43.745534Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0)
2025-04-17T17:14:43.745568Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0)
2025-04-17T17:14:43.745687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:14:43.745955Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute
2025-04-17T17:14:43.746016Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0)
2025-04-17T17:14:43.746060Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0)
2025-04-17T17:14:43.746104Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0)
2025-04-17T17:14:43.746272Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1"
2025-04-17T17:14:43.760798Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete
2025-04-17T17:14:43.760901Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:333:2374])
2025-04-17T17:14:43.761000Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete
2025-04-17T17:14:43.761749Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets
2025-04-17T17:14:43.761882Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:333:2374])::Execute
2025-04-17T17:14:43.761945Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0)
2025-04-17T17:14:43.762024Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:333:2374])::Complete
2025-04-17T17:14:43.762209Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 270443339776 }
2025-04-17T17:14:43.762281Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute
2025-04-17T17:14:43.762334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:14:43.773282Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1)
2025-04-17T17:14:43.773386Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0)
2025-04-17T17:14:43.773432Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0)
2025-04-17T17:14:43.773653Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute
2025-04-17T17:14:43.773696Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0)
2025-04-17T17:14:43.773739Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0)
2025-04-17T17:14:43.773779Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0)
2025-04-17T17:14:43.784628Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete
2025-04-17T17:14:43.784748Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete
2025-04-17T17:14:43.857690Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned
2025-04-17T17:14:43.857807Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480
2025-04-17T17:14:43.858034Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s
2025-04-17T17:14:43.858281Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2
2025-04-17T17:14:43.858336Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:409:2404] Proxy
2025-04-17T17:14:43.858903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480
2025-04-17T17:14:43.860384Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081
2025-04-17T17:14:43.860486Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1
2025-04-17T17:14:43.860520Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged
2025-04-17T17:14:43.860549Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged
2025-04-17T17:14:43.860744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-17T17:14:43.860784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1
2025-04-17T17:14:43.861654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1
2025-04-17T17:14:43.940071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-17T17:14:43.970861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480
2025-04-17T17:14:43.970959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-17T17:14:43.971881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1
2025-04-17T17:14:43.975376Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1))
2025-04-17T17:14:44.034175Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 }
2025-04-17T17:14:44.034280Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536
2025-04-17T17:14:44.038064Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {}
2025-04-17T17:14:44.038174Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {}
2025-04-17T17:14:44.038243Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576}
2025-04-17T17:14:44.049671Z node 1 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0
2025-04-17T17:14:44.050525Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK
2025-04-17T17:14:44.050696Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true
2025-04-17T17:14:44.051677Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897
2025-04-17T17:14:44.052041Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } }
2025-04-17T17:14:44.052184Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}(72075186224037888,HIVE_REASSIGN_REASON_NO,[])
2025-04-17T17:14:44.052266Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}: tablet 72075186224037888 channel 0 assigned to group 2181038080
2025-04-17T17:14:44.052469Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}: tablet 72075186224037888 channel 1 assigned to group 2181038080
2025-04-17T17:14:44.052564Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}: tablet 72075186224037888 channel 2 assigned to group 21810380 ... 037889
2025-04-17T17:14:59.025361Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715665 ssId 72057594046644480 seqNo 2:4
2025-04-17T17:14:59.025410Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715665 at tablet 72075186224037889
2025-04-17T17:14:59.025635Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1
2025-04-17T17:14:59.025827Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1
2025-04-17T17:14:59.025890Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889
2025-04-17T17:14:59.025919Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1
2025-04-17T17:14:59.025957Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 1
2025-04-17T17:14:59.036799Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889
2025-04-17T17:14:59.036924Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889
2025-04-17T17:14:59.038514Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 HANDLE EvProposeTransaction marker# C0
2025-04-17T17:14:59.038588Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 step# 33000 Status# 16 SEND to# [2:409:2404] Proxy marker# C1
2025-04-17T17:14:59.116592Z node 2 :TX_COORDINATOR DEBUG: Transaction 281474976715665 has been planned
2025-04-17T17:14:59.116731Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72057594046644480
2025-04-17T17:14:59.116990Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72075186224037889
2025-04-17T17:14:59.117276Z node 2 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 33500 in 0.500000s at 33.450000s
2025-04-17T17:14:59.117778Z node 2 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 33000, txid# 281474976715665 marker# C2
2025-04-17T17:14:59.117868Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 stepId# 33000 Status# 17 SEND EvProposeTransactionStatus to# [2:409:2404] Proxy
2025-04-17T17:14:59.118569Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715665 at step 33000 at tablet 72075186224037889 { Transactions { TxId: 281474976715665 AckTo { RawX1: 0 RawX2: 0 } } Step: 33000 MediatorID: 72057594046382081 TabletID: 72075186224037889 }
2025-04-17T17:14:59.118631Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889
2025-04-17T17:14:59.118845Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 33000, transactions count in step: 1, at schemeshard: 72057594046644480
2025-04-17T17:14:59.119310Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889
2025-04-17T17:14:59.119367Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1
2025-04-17T17:14:59.119416Z node 2 :TX_DATASHARD DEBUG: Found ready operation [33000:281474976715665] in PlanQueue unit at 72075186224037889
2025-04-17T17:14:59.119630Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 33000:281474976715665 keys extracted: 0
2025-04-17T17:14:59.119782Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1
2025-04-17T17:14:59.120067Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889
2025-04-17T17:14:59.132564Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037889
2025-04-17T17:14:59.133254Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-17T17:14:59.135361Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 33000}
2025-04-17T17:14:59.135442Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889
2025-04-17T17:14:59.136178Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081
2025-04-17T17:14:59.136286Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 removed=1
2025-04-17T17:14:59.136332Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 removed=1
2025-04-17T17:14:59.136361Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715665 for mediator 72057594046382081 acknowledged
2025-04-17T17:14:59.136401Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715665 acknowledged
2025-04-17T17:14:59.136755Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889
2025-04-17T17:14:59.136826Z node 2 :TX_DATASHARD DEBUG: Complete [33000 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms
2025-04-17T17:14:59.136891Z node 2 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715665 state PreOffline TxInFly 0
2025-04-17T17:14:59.137006Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889
2025-04-17T17:14:59.138007Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715665, done: 0, blocked: 1
2025-04-17T17:14:59.141244Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715665 datashard 72075186224037889 state PreOffline
2025-04-17T17:14:59.141327Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889
2025-04-17T17:14:59.141877Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0
2025-04-17T17:14:59.141989Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 1
2025-04-17T17:14:59.142612Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 1
2025-04-17T17:14:59.143251Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889
2025-04-17T17:14:59.144237Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NzVlZGNiMDUtYzE0NTE2MmMtNTE2ZTZjOGItNmNkYzMwODc= 2025-04-17 17:14:59.144 INFO ydb-core-tx-datashard-ut_minstep(pid=66646, tid=0x00007F4D3077BD00) [core exec] yql_execution.cpp:133: Completed async execution for node #42
2025-04-17T17:14:59.144361Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NzVlZGNiMDUtYzE0NTE2MmMtNTE2ZTZjOGItNmNkYzMwODc= 2025-04-17 17:14:59.144 INFO ydb-core-tx-datashard-ut_minstep(pid=66646, tid=0x00007F4D3077BD00) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42
2025-04-17T17:14:59.144438Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NzVlZGNiMDUtYzE0NTE2MmMtNTE2ZTZjOGItNmNkYzMwODc= 2025-04-17 17:14:59.144 INFO ydb-core-tx-datashard-ut_minstep(pid=66646, tid=0x00007F4D3077BD00) [core exec] yql_execution.cpp:59: Begin, root #43
2025-04-17T17:14:59.144487Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NzVlZGNiMDUtYzE0NTE2MmMtNTE2ZTZjOGItNmNkYzMwODc= 2025-04-17 17:14:59.144 INFO ydb-core-tx-datashard-ut_minstep(pid=66646, tid=0x00007F4D3077BD00) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok
2025-04-17T17:14:59.144538Z node 2 :KQP_YQL TRACE: SessionId: ydb://session/3?node_id=2&id=NzVlZGNiMDUtYzE0NTE2MmMtNTE2ZTZjOGItNmNkYzMwODc= 2025-04-17 17:14:59.144 TRACE ydb-core-tx-datashard-ut_minstep(pid=66646, tid=0x00007F4D3077BD00) [core exec] yql_execution.cpp:387: {0}, callable #43
2025-04-17T17:14:59.144604Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NzVlZGNiMDUtYzE0NTE2MmMtNTE2ZTZjOGItNmNkYzMwODc= 2025-04-17 17:14:59.144 INFO ydb-core-tx-datashard-ut_minstep(pid=66646, tid=0x00007F4D3077BD00) [core exec] yql_execution.cpp:577: Node #43 finished execution
2025-04-17T17:14:59.144674Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NzVlZGNiMDUtYzE0NTE2MmMtNTE2ZTZjOGItNmNkYzMwODc= 2025-04-17 17:14:59.144 INFO ydb-core-tx-datashard-ut_minstep(pid=66646, tid=0x00007F4D3077BD00) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes:
2025-04-17T17:14:59.144720Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NzVlZGNiMDUtYzE0NTE2MmMtNTE2ZTZjOGItNmNkYzMwODc= 2025-04-17 17:14:59.144 INFO ydb-core-tx-datashard-ut_minstep(pid=66646, tid=0x00007F4D3077BD00) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok
2025-04-17T17:14:59.144769Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NzVlZGNiMDUtYzE0NTE2MmMtNTE2ZTZjOGItNmNkYzMwODc= 2025-04-17 17:14:59.144 INFO ydb-core-tx-datashard-ut_minstep(pid=66646, tid=0x00007F4D3077BD00) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43
2025-04-17T17:14:59.144933Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=NzVlZGNiMDUtYzE0NTE2MmMtNTE2ZTZjOGItNmNkYzMwODc= 2025-04-17 17:14:59.144 NOTE ydb-core-tx-datashard-ut_minstep(pid=66646, tid=0x00007F4D3077BD00) [common provider] yql_provider_gateway.cpp:21:
: Info: Execution, code: 1060
2025-04-17T17:14:59.144987Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=NzVlZGNiMDUtYzE0NTE2MmMtNTE2ZTZjOGItNmNkYzMwODc= 2025-04-17 17:14:59.144 NOTE ydb-core-tx-datashard-ut_minstep(pid=66646, tid=0x00007F4D3077BD00) [common provider] yql_provider_gateway.cpp:21:
:1:12: Info: Executing DROP TABLE
2025-04-17T17:14:59.145024Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=NzVlZGNiMDUtYzE0NTE2MmMtNTE2ZTZjOGItNmNkYzMwODc= 2025-04-17 17:14:59.145 NOTE ydb-core-tx-datashard-ut_minstep(pid=66646, tid=0x00007F4D3077BD00) [common provider] yql_provider_gateway.cpp:21:
: Info: Success, code: 4
2025-04-17T17:14:59.200897Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from:
2025-04-17T17:14:59.201086Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state
2025-04-17T17:14:59.203240Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480
2025-04-17T17:14:59.204306Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline
2025-04-17T17:14:59.204736Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186224037889
2025-04-17T17:14:59.204799Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889
2025-04-17T17:14:59.204914Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2)
2025-04-17T17:14:59.205043Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2
2025-04-17T17:14:59.205160Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2
>> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc-UseSink [GOOD]
Test command err:
2025-04-17T17:13:09.011727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-17T17:13:09.020778Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:13:09.020868Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001154/r3tmp/tmpZVghv6/pdisk_1.dat
2025-04-17T17:13:10.001661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-17T17:13:10.078105Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:13:10.128941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:13:10.129069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:13:10.140605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:13:10.223005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-17T17:13:10.285240Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot
2025-04-17T17:13:10.286196Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored
2025-04-17T17:13:10.286595Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570]
2025-04-17T17:13:10.286812Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-17T17:13:10.329104Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-04-17T17:13:10.329735Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-17T17:13:10.329888Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-17T17:13:10.331357Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-04-17T17:13:10.331422Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-04-17T17:13:10.331476Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-04-17T17:13:10.331778Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-17T17:13:10.331900Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-17T17:13:10.331981Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1
2025-04-17T17:13:10.344586Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-17T17:13:10.376713Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-04-17T17:13:10.376883Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-17T17:13:10.376993Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580]
2025-04-17T17:13:10.377025Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-04-17T17:13:10.377064Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-04-17T17:13:10.377096Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-17T17:13:10.377292Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-17T17:13:10.377333Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-17T17:13:10.377546Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-04-17T17:13:10.377635Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-04-17T17:13:10.377740Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-17T17:13:10.377787Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-04-17T17:13:10.377827Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888
2025-04-17T17:13:10.377858Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations
2025-04-17T17:13:10.377901Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888
2025-04-17T17:13:10.377945Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-04-17T17:13:10.377980Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-17T17:13:10.378083Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-17T17:13:10.378114Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-17T17:13:10.378148Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0]
2025-04-17T17:13:10.378491Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574]
2025-04-17T17:13:10.378528Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-04-17T17:13:10.378618Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-04-17T17:13:10.378812Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx
2025-04-17T17:13:10.378862Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-04-17T17:13:10.378938Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-04-17T17:13:10.378978Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts
2025-04-17T17:13:10.379009Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx
2025-04-17T17:13:10.379038Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx
2025-04-17T17:13:10.379085Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-04-17T17:13:10.379315Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts
2025-04-17T17:13:10.379367Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx
2025-04-17T17:13:10.379399Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose
2025-04-17T17:13:10.379436Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-04-17T17:13:10.379474Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete
2025-04-17T17:13:10.379501Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose
2025-04-17T17:13:10.379528Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan
2025-04-17T17:13:10.379553Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-04-17T17:13:10.379574Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan
2025-04-17T17:13:10.380801Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated
2025-04-17T17:13:10.380847Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-17T17:13:10.392742Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-04-17T17:13:10.392837Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-04-17T17:13:10.392865Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-04-17T17:13:10.392900Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED
2025-04-17T17:13:10.392966Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
2025-04-17T17:13:10.562131Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-17T17:13:10.562179Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-17T17:13:10.562223Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0]
2025-04-17T17:13:10.562556Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888}
2025-04-17T17:13:10.562585Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep
2025-04-17T17:13:10.562681Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-04-17T17:13:10.562719Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed
2025-04-17T17:13:10.562751Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan
2025-04-17T17:13:10.562777Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue
2025-04-17T17:13:10.584797Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 }
2025-04-17T17:13:10.584892Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-17T17:13:10.585731Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-17T17:13:10.585771Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-17T17:13:10.585829Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-17T17:13:1 ... lan for [0:7] at 72075186224037888 executing on unit CompletedOperations
2025-04-17T17:14:58.575961Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished
2025-04-17T17:14:58.576023Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0
2025-04-17T17:14:58.576095Z node 15 :TX_DATASHARD DEBUG: Found ready candidate operation [0:8] at 72075186224037888 for ExecuteRead
2025-04-17T17:14:58.576580Z node 15 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=15&id=ZGY0MmMxMGUtYTZhYjBhZmYtYjQ3OTYxYTQtODAxNTIwMzc=, workerId: [15:1134:2910], local sessions count: 1
2025-04-17T17:14:58.576827Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [15:61:2108], Recipient [15:1061:2855]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 15 Status: STATUS_NOT_FOUND
2025-04-17T17:14:58.577409Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:667:2571], Recipient [15:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-17T17:14:58.577446Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-17T17:14:58.577494Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-17T17:14:58.577525Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0
2025-04-17T17:14:58.577554Z node 15 :TX_DATASHARD DEBUG: Return cached ready operation [0:8] at 72075186224037888
2025-04-17T17:14:58.577583Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead
2025-04-17T17:14:58.577686Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 2, request: { ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW }
2025-04-17T17:14:58.578075Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615
2025-04-17T17:14:58.578118Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:594:2519], 3} after executionsCount# 2
2025-04-17T17:14:58.578153Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 3} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0
2025-04-17T17:14:58.578285Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 3} finished in read
2025-04-17T17:14:58.578351Z node 15 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed
2025-04-17T17:14:58.578381Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead
2025-04-17T17:14:58.578407Z node 15 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations
2025-04-17T17:14:58.578436Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations
2025-04-17T17:14:58.578475Z node 15 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed
2025-04-17T17:14:58.578496Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations
2025-04-17T17:14:58.578518Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished
2025-04-17T17:14:58.578546Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-17T17:14:58.578588Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888
2025-04-17T17:14:58.578653Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations
2025-04-17T17:14:58.578716Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888
2025-04-17T17:14:58.578917Z node 15 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=15&id=ZjBiOTg4NDAtNmY0MjVjNjQtNjQxYzYyNzQtODVlMTc0ZWM=, workerId: [15:1163:2931], local sessions count: 0
2025-04-17T17:14:58.580337Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:594:2519], Recipient [15:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 4 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW KeysSize: 1
2025-04-17T17:14:58.580508Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0
2025-04-17T17:14:58.580619Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit CheckRead
2025-04-17T17:14:58.580762Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed
2025-04-17T17:14:58.580832Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit CheckRead
2025-04-17T17:14:58.580907Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit BuildAndWaitDependencies
2025-04-17T17:14:58.580967Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit BuildAndWaitDependencies
2025-04-17T17:14:58.581024Z node 15 :TX_DATASHARD TRACE: Activated operation [0:9] at 72075186224037888
2025-04-17T17:14:58.581083Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed
2025-04-17T17:14:58.581113Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit BuildAndWaitDependencies
2025-04-17T17:14:58.581136Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit ExecuteRead
2025-04-17T17:14:58.581158Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit ExecuteRead
2025-04-17T17:14:58.581296Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 4 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW }
2025-04-17T17:14:58.581659Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615
2025-04-17T17:14:58.581736Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:594:2519], 4} after executionsCount# 1
2025-04-17T17:14:58.581826Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 4} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0
2025-04-17T17:14:58.582085Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 4} finished in read
2025-04-17T17:14:58.582188Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed
2025-04-17T17:14:58.582215Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit ExecuteRead
2025-04-17T17:14:58.582241Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit CompletedOperations
2025-04-17T17:14:58.582269Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit CompletedOperations
2025-04-17T17:14:58.582319Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed
2025-04-17T17:14:58.582344Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit CompletedOperations
2025-04-17T17:14:58.582377Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:9] at 72075186224037888 has finished
2025-04-17T17:14:58.582448Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888
2025-04-17T17:14:58.583411Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:594:2519], Recipient [15:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 5 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW KeysSize: 1
2025-04-17T17:14:58.583593Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0
2025-04-17T17:14:58.583717Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CheckRead
2025-04-17T17:14:58.583852Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed
2025-04-17T17:14:58.583917Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CheckRead
2025-04-17T17:14:58.583980Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit BuildAndWaitDependencies
2025-04-17T17:14:58.584041Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit BuildAndWaitDependencies
2025-04-17T17:14:58.584094Z node 15 :TX_DATASHARD TRACE: Activated operation [0:10] at 72075186224037888
2025-04-17T17:14:58.584210Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed
2025-04-17T17:14:58.584240Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit BuildAndWaitDependencies
2025-04-17T17:14:58.584265Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit ExecuteRead
2025-04-17T17:14:58.584290Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit ExecuteRead
2025-04-17T17:14:58.584467Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 5 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW }
2025-04-17T17:14:58.584841Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615
2025-04-17T17:14:58.584920Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:594:2519], 5} after executionsCount# 1
2025-04-17T17:14:58.585020Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 5} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0
2025-04-17T17:14:58.585270Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 5} finished in read
2025-04-17T17:14:58.585367Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed
2025-04-17T17:14:58.585398Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit ExecuteRead
2025-04-17T17:14:58.585426Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit CompletedOperations
2025-04-17T17:14:58.585454Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CompletedOperations
2025-04-17T17:14:58.585500Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed
2025-04-17T17:14:58.585525Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CompletedOperations
2025-04-17T17:14:58.585566Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:10] at 72075186224037888 has finished
2025-04-17T17:14:58.585638Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888
>> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest [GOOD]
>> DataShardReadIterator::ShouldReadRangeChunk100 [GOOD]
|76.6%| [TA] $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::CreateOldTable [GOOD]
Test command err:
2025-04-17T17:14:47.600934Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331760559620177:2199];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:14:47.601282Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0021aa/r3tmp/tmpMGMOHg/pdisk_1.dat
2025-04-17T17:14:48.409299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:14:48.409403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:14:48.412658Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:14:48.418790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:7210
TServer::EnableGrpc on GrpcPort 7507, node 1
2025-04-17T17:14:48.780631Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T17:14:48.780651Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T17:14:48.780657Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T17:14:48.780804Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-04-17T17:14:49.115140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:14:49.164729Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:14:51.860634Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:14:51.861529Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:14:51.867648Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:14:51.867687Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:14:51.867727Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-17T17:14:51.867745Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-17T17:14:51.867760Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:14:51.867826Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:14:51.875945Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-17T17:14:51.875953Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-17T17:14:51.875984Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-17T17:14:51.876077Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-17T17:14:51.876082Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-17T17:14:51.876092Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-17T17:14:51.876132Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-17T17:14:51.876135Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-17T17:14:51.876145Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-17T17:14:51.883644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-17T17:14:51.886705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:14:51.887909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:14:51.894064Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-17T17:14:51.894112Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710658 2025-04-17T17:14:51.900900Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-17T17:14:51.900924Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. 
Subscribe on create table tx: 281474976710660 2025-04-17T17:14:51.900972Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-17T17:14:51.900985Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710659 2025-04-17T17:14:52.037773Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-17T17:14:52.121375Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-17T17:14:52.122944Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-17T17:14:52.122993Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-17T17:14:52.191359Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-17T17:14:52.211485Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-17T17:14:52.212453Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: c653f9f5-f35ce60c-5d174d22-e6a28147, Bootstrap. Database: /dc-1 2025-04-17T17:14:52.248332Z node 1 :KQP_PROXY DEBUG: Request has 18444999163617.303311s seconds to be completed 2025-04-17T17:14:52.250767Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=OTVmZjZlMjQtZDIyMjY1MGMtYzhkMjA1MmUtZTE4MDk4YWU=, workerId: [1:7494331782034457413:2334], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-17T17:14:52.250899Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:14:52.251764Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: c653f9f5-f35ce60c-5d174d22-e6a28147, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-17T17:14:52.252795Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=OTVmZjZlMjQtZDIyMjY1MGMtYzhkMjA1MmUtZTE4MDk4YWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [1:7494331782034457413:2334] 2025-04-17T17:14:52.252839Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7494331782034457415:2468] 2025-04-17T17:14:52.254901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331782034457416:2336], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:52.254974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:52.255210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331782034457428:2339], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:52.258165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-17T17:14:52.288333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331782034457430:2340], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T17:14:52.346093Z node 1 :TX_PROXY ERROR: Actor# [1:7494331782034457473:2501] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:52.586391Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.c ... bletId: 72057594046644480 PathId: 4 } 2025-04-17T17:14:59.524006Z node 2 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976715659 2025-04-17T17:14:59.526555Z node 2 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-17T17:14:59.526592Z node 2 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976715660 2025-04-17T17:14:59.618997Z node 2 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976715658. Doublechecking... 2025-04-17T17:14:59.695439Z node 2 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-17T17:14:59.706110Z node 2 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976715659. Doublechecking... 2025-04-17T17:14:59.715839Z node 2 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976715660. Doublechecking... 2025-04-17T17:14:59.772405Z node 2 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-17T17:14:59.812425Z node 2 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-17T17:14:59.812836Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 86cc7692-22b72a48-885c9968-96eb5b13, Bootstrap. 
Database: /dc-1 2025-04-17T17:14:59.813049Z node 2 :KQP_PROXY DEBUG: Request has 18444999163609.738580s seconds to be completed 2025-04-17T17:14:59.814749Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZTRkMzIzYzctNDU3NGYwMTgtYWU5NmJmYy1iOTUzOWVkNw==, workerId: [2:7494331813180765907:2335], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-17T17:14:59.814871Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:14:59.815399Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 86cc7692-22b72a48-885c9968-96eb5b13, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-17T17:14:59.817614Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZTRkMzIzYzctNDU3NGYwMTgtYWU5NmJmYy1iOTUzOWVkNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [2:7494331813180765907:2335] 2025-04-17T17:14:59.817652Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [2:7494331813180765909:2471] 2025-04-17T17:14:59.818884Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494331813180765910:2337], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:59.818955Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:59.819295Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494331813180765922:2340], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:59.822272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:2, at schemeshard: 72057594046644480 2025-04-17T17:14:59.837068Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-04-17T17:14:59.844311Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494331813180765924:2341], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-17T17:14:59.946633Z node 2 :TX_PROXY ERROR: Actor# [2:7494331813180765967:2504] txid# 281474976715662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:15:00.386465Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:7494331813180765908:2336], selfId: [2:7494331791705928696:2217], source: [2:7494331813180765907:2335] 2025-04-17T17:15:00.386879Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 86cc7692-22b72a48-885c9968-96eb5b13, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTRkMzIzYzctNDU3NGYwMTgtYWU5NmJmYy1iOTUzOWVkNw==, TxId: 2025-04-17T17:15:00.386898Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 86cc7692-22b72a48-885c9968-96eb5b13, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTRkMzIzYzctNDU3NGYwMTgtYWU5NmJmYy1iOTUzOWVkNw==, TxId: 2025-04-17T17:15:00.386909Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] Create script execution operation. ExecutionId: 86cc7692-22b72a48-885c9968-96eb5b13. Result: SUCCESS. Issues: 2025-04-17T17:15:00.388969Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZGFmOWY0YWMtOWYxNzBjMDctNmRiOGVkZDMtNzU3MzhlZGM=, workerId: [2:7494331817475733327:2354], database: dc-1, longSession: 1, local sessions count: 2 2025-04-17T17:15:00.389088Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:15:00.389374Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZTRkMzIzYzctNDU3NGYwMTgtYWU5NmJmYy1iOTUzOWVkNw==, workerId: [2:7494331813180765907:2335], local sessions count: 1 2025-04-17T17:15:00.389776Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZGFmOWY0YWMtOWYxNzBjMDctNmRiOGVkZDMtNzU3MzhlZGM=, CurrentExecutionId: 86cc7692-22b72a48-885c9968-96eb5b13, CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. Send request to target, requestId: 5, targetId: [2:7494331817475733327:2354] 2025-04-17T17:15:00.389815Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 604800.000000s actor id: [2:7494331817475733330:2542] 2025-04-17T17:15:00.432450Z node 2 :KQP_PROXY DEBUG: TraceId: "01js2by9yf28s7ktm70t51yat0", Request has 18444999163609.119195s seconds to be completed 2025-04-17T17:15:00.434214Z node 2 :KQP_PROXY DEBUG: TraceId: "01js2by9yf28s7ktm70t51yat0", Created new session, sessionId: ydb://session/3?node_id=2&id=YWQ4NGJjZWItYzAwYWY3MjYtNTRkOTJkZTAtZjI2ZjFiYQ==, workerId: [2:7494331817475733345:2364], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-17T17:15:00.434326Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 01js2by9yf28s7ktm70t51yat0 2025-04-17T17:15:00.442090Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Describe result: PathErrorUnknown 2025-04-17T17:15:00.442111Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Creating table 2025-04-17T17:15:00.442136Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Full table path:/dc-1/.test/test_table 2025-04-17T17:15:00.448887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:1, at schemeshard: 72057594046644480 2025-04-17T17:15:00.450851Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715664 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 10 } 2025-04-17T17:15:00.450891Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Subscribe on create table tx: 281474976715664 2025-04-17T17:15:00.531667Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 86cc7692-22b72a48-885c9968-96eb5b13, Bootstrap. Database: /dc-1 2025-04-17T17:15:00.533280Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [2:7494331813180765904:2469], selfId: [2:7494331791705928696:2217], source: [2:7494331817475733327:2354] 2025-04-17T17:15:00.533372Z node 2 :KQP_PROXY DEBUG: Request has 18444999163609.018259s seconds to be completed 2025-04-17T17:15:00.544355Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=OGFkYjcwNjQtMWE0ZThiNjQtYzc4OGIwZjktY2JjZDM4YWQ=, workerId: [2:7494331817475733431:2370], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-17T17:15:00.544508Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:15:00.546184Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 86cc7692-22b72a48-885c9968-96eb5b13, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2025-04-17T17:15:00.547305Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=OGFkYjcwNjQtMWE0ZThiNjQtYzc4OGIwZjktY2JjZDM4YWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 8, targetId: [2:7494331817475733431:2370] 2025-04-17T17:15:00.547330Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 8 timeout: 300.000000s actor id: [2:7494331817475733437:2604] 2025-04-17T17:15:00.557987Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: create. Transaction completed: 281474976715664. Doublechecking... 2025-04-17T17:15:00.624467Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:15:00.627107Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Column diff is empty, finishing 2025-04-17T17:15:00.669359Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YWQ4NGJjZWItYzAwYWY3MjYtNTRkOTJkZTAtZjI2ZjFiYQ==, workerId: [2:7494331817475733345:2364], local sessions count: 2 >> TestKinesisHttpProxy::TestRequestWithWrongRegion >> YdbProxy::AlterTable [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TConsoleTests::TestCreateSubSubDomainExtSubdomain [GOOD] >> TConsoleTests::TestAuthorization >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 >> YdbProxy::CopyTable [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular >> YdbProxy::CopyTables >> TGRpcStreamingTest::WritesDoneFromClient ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadRangeChunk100 [GOOD] Test command err: 2025-04-17T17:13:11.389584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:13:11.392876Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:13:11.393587Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001102/r3tmp/tmpjNAzIh/pdisk_1.dat 2025-04-17T17:13:11.883638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:13:11.951256Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:11.997747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:11.997867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:12.013416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:13:12.099134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:13:12.198482Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:13:12.199683Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:13:12.204404Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:13:12.204658Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:12.250012Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:13:12.250910Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:12.251033Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:13:12.252933Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:13:12.253019Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:13:12.253075Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:13:12.253417Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:13:12.253528Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:13:12.253595Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:13:12.264532Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:13:12.319436Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:13:12.319624Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:13:12.319749Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:13:12.319787Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:13:12.319847Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:13:12.319891Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:13:12.320293Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:12.320357Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:12.320634Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:13:12.320722Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:13:12.320843Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:13:12.320900Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:13:12.320950Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:13:12.320993Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:13:12.321047Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:13:12.321087Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:13:12.321136Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:13:12.321314Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:12.321357Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:12.321411Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:13:12.321783Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:13:12.321829Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:13:12.321941Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:13:12.322185Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:13:12.322256Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:13:12.322356Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:13:12.322409Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:13:12.322459Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:13:12.322502Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:13:12.322542Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:13:12.322841Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:13:12.322893Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:13:12.322931Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:13:12.322989Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:13:12.323039Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:13:12.323077Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:13:12.323108Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:13:12.323146Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:13:12.323173Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:13:12.324740Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:13:12.324785Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:13:12.336894Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:13:12.336978Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:13:12.337022Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:13:12.337072Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:13:12.337175Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:13:12.518436Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:12.518497Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:12.518539Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:13:12.518942Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:13:12.518985Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:13:12.519085Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:13:12.519130Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:13:12.519168Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:13:12.519217Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:13:12.523418Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:13:12.523486Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:13:12.528689Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:12.528740Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:12.528804Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:13:1 ... :2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:15:01.441825Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:15:01.442171Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709543002, quota bytes left# 18446744073709000383, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:15:01.442347Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:15:01.442400Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:15:01.442442Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:15:01.442775Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542903, quota bytes left# 18446744073708994047, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:15:01.442936Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:15:01.442983Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:15:01.443023Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:15:01.443344Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542804, quota bytes left# 18446744073708987711, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:15:01.443477Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:15:01.443518Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:15:01.443557Z 
node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:15:01.443965Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542705, quota bytes left# 18446744073708981375, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:15:01.444147Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:15:01.444198Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:15:01.444242Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:15:01.444606Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542606, quota bytes left# 18446744073708975039, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:15:01.444788Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:15:01.444832Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:15:01.444875Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:15:01.445228Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542507, quota bytes left# 18446744073708968703, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:15:01.445367Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:15:01.445414Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:15:01.445458Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:15:01.445772Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542408, quota bytes left# 18446744073708962367, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:15:01.445948Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:15:01.445994Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:15:01.446035Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:15:01.446587Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542309, quota bytes left# 18446744073708956031, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:15:01.446724Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender 
[15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:15:01.446770Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:15:01.446812Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:15:01.447144Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542210, quota bytes left# 18446744073708949695, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:15:01.447320Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:15:01.447371Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:15:01.447413Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:15:01.447775Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542111, quota bytes left# 18446744073708943359, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:15:01.447951Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:15:01.448003Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:15:01.448048Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:15:01.448422Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542012, quota bytes left# 18446744073708937023, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:15:01.448561Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:15:01.448611Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:15:01.448651Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:15:01.449007Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709541913, quota bytes left# 18446744073708930687, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:15:01.449169Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:15:01.449218Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:15:01.449264Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:15:01.449613Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 
6336, quota rows left# 18446744073709541814, quota bytes left# 18446744073708924351, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:15:01.449803Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:15:01.449855Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:15:01.449901Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:15:01.450251Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709541715, quota bytes left# 18446744073708918015, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:15:01.450386Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:15:01.450429Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:15:01.450472Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:15:01.450797Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709541616, quota bytes left# 18446744073708911679, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:15:01.450974Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:15:01.451022Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:15:01.451066Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:15:01.451194Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 1, bytes# 64, quota rows left# 18446744073709541615, quota bytes left# 18446744073708911615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:15:01.451269Z node 15 :TX_DATASHARD DEBUG: 72075186224037890 read iterator# {[15:594:2519], 1} finished in ReadContinue |76.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |76.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |76.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} |76.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTable [GOOD] Test command err: 2025-04-17T17:14:56.657931Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331799456347109:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:56.658150Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001efb/r3tmp/tmpn8CTas/pdisk_1.dat 2025-04-17T17:14:57.260158Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:57.278466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:57.278566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:57.282880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27105 TServer::EnableGrpc on GrpcPort 29998, node 1 2025-04-17T17:14:57.896285Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:57.896308Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:57.896315Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:57.896422Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27105 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:58.656904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-04-17T17:14:58.709283Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:15:01.656702Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331799456347109:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:15:01.672010Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:15:02.833344Z node 1 :TX_PROXY ERROR: Actor# [1:7494331825226151402:2314] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-17T17:15:02.857524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:15:03.049022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:15:03.074755Z node 1 :TX_PROXY ERROR: Actor# [1:7494331829521118815:2394] txid# 281474976710661, issues: { message: "Can\'t drop unknown column: \'extra\'" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 [GOOD] Test command err: 2025-04-17T17:12:15.002674Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331108257628559:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:15.002723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0017c0/r3tmp/tmphLEx9J/pdisk_1.dat 2025-04-17T17:12:20.682772Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331108257628559:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:20.706118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:12:23.072241Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:12:25.392763Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331129732465386:2291];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:25.393087Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:12:26.184745Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:26.184773Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:28.806995Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:28.868833Z node 1 :METADATA_PROVIDER 
ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:28.868858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:28.868867Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:29.912678Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:29.945028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:29.945052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:30.288664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:12:30.289631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:12:30.591892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:12:31.577349Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.197319s 2025-04-17T17:12:31.577694Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.197680s TServer::EnableGrpc on GrpcPort 1850, node 1 2025-04-17T17:12:33.650079Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6vg8/0017c0/r3tmp/yandexpO8G7H.tmp 2025-04-17T17:12:33.650100Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6vg8/0017c0/r3tmp/yandexpO8G7H.tmp 2025-04-17T17:12:33.651413Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6vg8/0017c0/r3tmp/yandexpO8G7H.tmp 2025-04-17T17:12:33.651509Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:12:36.011535Z INFO: TTestServer started on Port 61613 GrpcPort 1850 TClient is connected to server localhost:61613 PQClient connected to localhost:1850 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:12:41.406618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T17:12:41.777244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:12:43.126797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:12:43.126817Z node 1 :IMPORT WARN: Table profiles were not loaded waiting... waiting... 2025-04-17T17:12:47.916761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331245696582923:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:47.916936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:47.920400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331245696582950:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:47.928747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:12:47.951472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331245696582952:2367], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:12:48.253533Z node 1 :TX_PROXY ERROR: Actor# [1:7494331249991550312:2502] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:12:48.302994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:12:48.377965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:12:48.476090Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494331249991550320:2373], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:12:48.476312Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjRjMzBiZGEtYjhlODk0MWMtMmNhMTZmM2UtNzQ1ZDRhNDg=, ActorId: [1:7494331245696582920:2361], ActorState: ExecuteState, TraceId: 01js2bt8gs8qqjtnw3zab8bv32, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:12:48.500610Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:12:48.521603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7494331249991550623:2685] === CheckClustersList. Ok 2025-04-17T17:12:55.538065Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-17T17:12:55.692273Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-17T17:12:55.698371Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494331280056321982:2842], Recipient [1:7494331138322400073:2198]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:55.698409Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:55.698426Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:12:55.698459Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7494331280056321978:2839], Recipient [1:7494331138322400073:2198]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-17T17:12:55.698472Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T17:12:55.847776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-t ... 7T17:15:00.157933Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:00.157980Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:00.158006Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:00.158034Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-17T17:15:00.165764Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [6:7494331796680688872:2781], Recipient [6:7494331796680688872:2781]: NKikimr::TEvKeyValue::TEvCollect 2025-04-17T17:15:00.165916Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [6:7494331762320949213:2461], Recipient [6:7494331762320949213:2461]: NKikimr::TEvKeyValue::TEvCollect 2025-04-17T17:15:00.166316Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [6:7494331818155526964:3122], Recipient [6:7494331796680688872:2781]: NKikimr::TEvKeyValue::TEvCompleteGC 2025-04-17T17:15:00.166651Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [6:7494331818155526965:3123], Recipient [6:7494331762320949213:2461]: NKikimr::TEvKeyValue::TEvCompleteGC 2025-04-17T17:15:00.176411Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331796680688872:2781], Partition 2, Sender [0:0:0], Recipient [6:7494331796680688980:2796], Cookie: 0 2025-04-17T17:15:00.176482Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331796680688980:2796]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:00.176513Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:00.176560Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:00.176622Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:00.176646Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:00.176678Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:00.176740Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331762320949213:2461], Partition 0, Sender [0:0:0], Recipient [6:7494331762320949256:2464], Cookie: 0 2025-04-17T17:15:00.176778Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331762320949256:2464]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:00.176791Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:00.176819Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:00.176865Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:00.176878Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:00.176895Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-17T17:15:00.180910Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331796680688871:2780], Partition 1, Sender [0:0:0], Recipient [6:7494331796680688982:2798], Cookie: 0 2025-04-17T17:15:00.180992Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331796680688982:2798]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:00.181020Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:00.181069Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:00.181127Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:00.181150Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:00.181179Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:00.181233Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331805270623923:2868], Partition 3, Sender [0:0:0], Recipient [6:7494331805270624012:2877], Cookie: 0 2025-04-17T17:15:00.181269Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331805270624012:2877]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:00.181285Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:00.181309Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:00.181340Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:00.181357Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:00.181374Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:00.255863Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331805270623919:2867], Partition 4, Sender [0:0:0], Recipient [6:7494331805270624011:2876], Cookie: 0 2025-04-17T17:15:00.255936Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331805270624011:2876]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:00.255964Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:00.256012Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:00.256095Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:00.256133Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:00.256176Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-17T17:15:00.280366Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331796680688872:2781], Partition 2, Sender [0:0:0], Recipient [6:7494331796680688980:2796], Cookie: 0 2025-04-17T17:15:00.280447Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331796680688980:2796]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:00.280480Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:00.280531Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:00.280618Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:00.280647Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:00.280681Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:00.280750Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331762320949213:2461], Partition 0, Sender [0:0:0], Recipient [6:7494331762320949256:2464], Cookie: 0 2025-04-17T17:15:00.280801Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331762320949256:2464]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:00.280817Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:00.280843Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:00.280877Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:00.280896Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:00.280914Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:00.281414Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331796680688871:2780], Partition 1, Sender [0:0:0], Recipient [6:7494331796680688982:2798], Cookie: 0 2025-04-17T17:15:00.281460Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331796680688982:2798]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:00.281477Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:00.281506Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:00.281542Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:00.281558Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:00.281592Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-17T17:15:00.281641Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331805270623923:2868], Partition 3, Sender [0:0:0], Recipient [6:7494331805270624012:2877], Cookie: 0 2025-04-17T17:15:00.281679Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331805270624012:2877]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:00.281692Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:00.281717Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:00.281750Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:00.281770Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:00.281802Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD] >> TableCreation::ConcurrentUpdateTable [GOOD] >> TGRpcStreamingTest::ReadFinish >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime |76.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |76.6%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |76.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime [GOOD] >> TFlatTableExecutor_CachePressure::TestNotEnoughLocalCache [GOOD] >> TFlatTableExecutor_Cold::ColdBorrowScan >> EvWrite::WriteWithSplit [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [GOOD] >> TFlatTableExecutor_Cold::ColdBorrowScan [GOOD] >> TFlatTableExecutor_ColumnGroups::TestManyRows |76.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |76.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |76.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table >> YdbProxy::MakeDirectory [GOOD] >> YdbProxy::OAuthToken >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks3 [GOOD] >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithSplit [GOOD] Test command err: 2025-04-17T17:14:30.723750Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:14:30.852934Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:14:30.881013Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:14:30.881317Z node 1 :TX_COLUMNSHARD 
DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:14:30.889381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:14:30.889609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:14:30.889822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:14:30.889930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:14:30.890062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:14:30.890186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:14:30.890285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:14:30.890384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:14:30.890494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:14:30.890623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:14:30.890749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:14:30.890852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:14:30.919550Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:14:30.919822Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:14:30.919892Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T17:14:30.920154Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 
2025-04-17T17:14:30.920323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:14:30.920397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:14:30.920435Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:14:30.920567Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:14:30.920637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:14:30.920684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:14:30.920713Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:14:30.920863Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:14:30.920931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:14:30.920978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:14:30.921005Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:14:30.921105Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:14:30.921159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:14:30.921198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:14:30.921231Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:14:30.921304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:14:30.921348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:14:30.921375Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:14:30.921417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:14:30.921452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:14:30.921481Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:14:30.921850Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=54; 2025-04-17T17:14:30.921925Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-04-17T17:14:30.921986Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=28; 2025-04-17T17:14:30.922080Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=55; 2025-04-17T17:14:30.922232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:14:30.922308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:14:30.922345Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:14:30.922533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:14:30.922575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:14:30.922599Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:14:30.922770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:14:30.922813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:14:30.922841Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T17:14:30.923006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 
2025-04-17T17:14:30.923047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:14:30.923078Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T17:14:30.923185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:14:30.923226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:14:30.923263Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 132010Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:856:2873];bytes=3691800;rows=450;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-04-17T17:15:05.132133Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:1;records_count:149;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T17:15:05.132249Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:149;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T17:15:05.132295Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T17:15:05.132335Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T17:15:05.133638Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T17:15:05.133749Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:149;schema=key: uint64 field: 
string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T17:15:05.133789Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T17:15:05.133868Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=149; 2025-04-17T17:15:05.133919Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=1222396;num_rows=149;batch_columns=key,field; 2025-04-17T17:15:05.134028Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:856:2873];bytes=1222396;rows=149;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-04-17T17:15:05.134126Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T17:15:05.134232Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T17:15:05.134377Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T17:15:05.134944Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T17:15:05.135043Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T17:15:05.135155Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T17:15:05.135209Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:865:2882] finished for tablet 9437184 2025-04-17T17:15:05.135805Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:856:2873];stats={"p":[{"events":["f_bootstrap"],"t":0.001},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.012},{"events":["l_task_result"],"t":0.772},{"events":["f_ack"],"t":25},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":25.016}],"full":{"a":1744910080119032,"name":"_full_task","f":1744910080119032,"d_finished":0,"c":0,"l":1744910105135280,"d":25016248},"events":[{"name":"bootstrap","f":1744910080120306,"d_finished":10921,"c":1,"l":1744910080131227,"d":10921},{"a":1744910105134919,"name":"ack","f":1744910105120000,"d_finished":8390,"c":9,"l":1744910105134411,"d":8751},{"a":1744910105134899,"name":"processing","f":1744910080131454,"d_finished":299284,"c":53,"l":1744910105134413,"d":299665},{"name":"ProduceResults","f":1744910080123855,"d_finished":35661,"c":64,"l":1744910105135179,"d":35661},{"a":1744910105135183,"name":"Finish","f":1744910105135183,"d_finished":0,"c":0,"l":1744910105135280,"d":97},{"name":"task_result","f":1744910080131484,"d_finished":289594,"c":44,"l":1744910080891571,"d":289594}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T17:15:05.135905Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:856:2873];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T17:15:05.136404Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:856:2873];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.001},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.012},{"events":["l_task_result"],"t":0.772},{"events":["f_ack"],"t":25},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":25.016}],"full":{"a":1744910080119032,"name":"_full_task","f":1744910080119032,"d_finished":0,"c":0,"l":1744910105135960,"d":25016928},"events":[{"name":"bootstrap","f":1744910080120306,"d_finished":10921,"c":1,"l":1744910080131227,"d":10921},{"a":1744910105134919,"name":"ack","f":1744910105120000,"d_finished":8390,"c":9,"l":1744910105134411,"d":9431},{"a":1744910105134899,"name":"processing","f":1744910080131454,"d_finished":299284,"c":53,"l":1744910105134413,"d":300345},{"name":"ProduceResults","f":1744910080123855,"d_finished":35661,"c":64,"l":1744910105135179,"d":35661},{"a":1744910105135183,"name":"Finish","f":1744910105135183,"d_finished":0,"c":0,"l":1744910105135960,"d":777},{"name":"task_result","f":1744910080131484,"d_finished":289594,"c":44,"l":1744910080891571,"d":289594}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T17:15:05.136492Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T17:14:40.117871Z;index_granules=0;index_portions=5;index_batches=2052;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=17133336;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=17133336;selected_rows=0; 2025-04-17T17:15:05.136546Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T17:15:05.136799Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::ConcurrentUpdateTable [GOOD] Test command err: 2025-04-17T17:14:51.664623Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331778317239843:2080];send_to=[0:7307199536658146131:7762515]; 
2025-04-17T17:14:51.668058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0021a4/r3tmp/tmpXxPwH0/pdisk_1.dat 2025-04-17T17:14:52.363433Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:52.374479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:52.374559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:52.387416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25140 TServer::EnableGrpc on GrpcPort 11471, node 1 2025-04-17T17:14:53.024856Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:53.024881Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:53.024890Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:53.025018Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T17:14:53.483680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:53.512537Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:14:56.128787Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:14:56.131501Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:14:56.137719Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-17T17:14:56.137746Z node 1 :KQP_PROXY DEBUG: Updated table service config. 
2025-04-17T17:14:56.137765Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:14:56.137804Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:14:56.137855Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:14:56.137887Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:14:56.141254Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-17T17:14:56.141262Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-17T17:14:56.141341Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-17T17:14:56.141952Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-17T17:14:56.141959Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-17T17:14:56.141980Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-17T17:14:56.142211Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-17T17:14:56.142215Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-17T17:14:56.142234Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-17T17:14:56.146644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-17T17:14:56.148825Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:14:56.148881Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:14:56.150396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:14:56.156055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:14:56.163440Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-17T17:14:56.163504Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710658 2025-04-17T17:14:56.163594Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-17T17:14:56.163614Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710660 2025-04-17T17:14:56.166074Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-17T17:14:56.166134Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710659 2025-04-17T17:14:56.380644Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-17T17:14:56.444504Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-17T17:14:56.445308Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-17T17:14:56.459189Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-17T17:14:56.542237Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-17T17:14:56.543077Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-17T17:14:56.543447Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 2ddee8fe-b520b45f-fcb415cc-9f3afbdd, Bootstrap. Database: /dc-1 2025-04-17T17:14:56.561839Z node 1 :KQP_PROXY DEBUG: Request has 18444999163612.989810s seconds to be completed 2025-04-17T17:14:56.564572Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=ZTcyZDgyNmYtODQ2ZWMyYjMtZGJiZDZmNWEtOWU2MDM0OWY=, workerId: [1:7494331799792077194:2334], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-17T17:14:56.564727Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:14:56.565716Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 2ddee8fe-b520b45f-fcb415cc-9f3afbdd, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-17T17:14:56.566723Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=ZTcyZDgyNmYtODQ2ZWMyYjMtZGJiZDZmNWEtOWU2MDM0OWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [1:7494331799792077194:2334] 2025-04-17T17:14:56.566756Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7494331799792077196:2472] 2025-04-17T17:14:56.568952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331799792077206:2339], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:56.570548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331799792077197:2336], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:56.570702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:56.572533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-17T17:14:56.583947Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-17T17:14:56.588348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331799792077211:2340], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T17:14:56.664974Z ... nerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } 2025-04-17T17:15:04.363081Z node 2 :TX_PROXY ERROR: Actor# [2:7494331832439665995:2645] txid# 281474976715674, issues: { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } 2025-04-17T17:15:04.363163Z node 2 :TX_PROXY ERROR: Actor# [2:7494331832439665996:2646] txid# 281474976715675, issues: { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } 2025-04-17T17:15:04.363359Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715672 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-17T17:15:04.363367Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-17T17:15:04.363437Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715667 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-17T17:15:04.363442Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-17T17:15:04.363492Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715674 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-17T17:15:04.363497Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-17T17:15:04.364081Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715675 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-17T17:15:04.364093Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-17T17:15:04.383930Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: alter. Transaction completed: 281474976715666. Doublechecking... 2025-04-17T17:15:04.412594Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:15:04.412676Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:15:04.428447Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:15:04.428507Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:15:04.452533Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:15:04.459006Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:15:04.459102Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:15:04.459171Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:15:04.459246Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:15:04.459316Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:15:04.501340Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZjRiMjVmMDktMmJkMTY3ZjAtOGVmMTQzMjItZGJjODdlMTA=, workerId: [2:7494331832439665850:2362], local sessions count: 2 2025-04-17T17:15:04.503813Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7494331832439665922:2367], selfId: [2:7494331810964828387:2098], source: [2:7494331832439665920:2366] 2025-04-17T17:15:04.504102Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 54373db-3b97db29-f6f3167e-ea10e380, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NmFlYzhkNDQtNjFkOGY5NjUtYjQ5YTM2YmYtOTAxZWNkMDY=, TxId: 2025-04-17T17:15:04.504144Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 54373db-3b97db29-f6f3167e-ea10e380, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmFlYzhkNDQtNjFkOGY5NjUtYjQ5YTM2YmYtOTAxZWNkMDY=, TxId: 2025-04-17T17:15:04.504264Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 54373db-3b97db29-f6f3167e-ea10e380, start saving rows range [0; 1) 2025-04-17T17:15:04.504377Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 54373db-3b97db29-f6f3167e-ea10e380, Bootstrap. 
Database: /dc-1 2025-04-17T17:15:04.504552Z node 2 :KQP_PROXY DEBUG: Request has 18444999163605.047083s seconds to be completed 2025-04-17T17:15:04.506552Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=YzgxZDdlYzMtM2MxYzg5ZjItOTNjMTQ4N2UtMWY2ZTA5Mzc=, workerId: [2:7494331832439666095:2380], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-17T17:15:04.506700Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:15:04.506961Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NmFlYzhkNDQtNjFkOGY5NjUtYjQ5YTM2YmYtOTAxZWNkMDY=, workerId: [2:7494331832439665920:2366], local sessions count: 2 2025-04-17T17:15:04.507193Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 54373db-3b97db29-f6f3167e-ea10e380, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-04-17T17:15:04.508775Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YzgxZDdlYzMtM2MxYzg5ZjItOTNjMTQ4N2UtMWY2ZTA5Mzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 10, targetId: [2:7494331832439666095:2380] 2025-04-17T17:15:04.508829Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7494331832439666098:2718] 2025-04-17T17:15:04.713448Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 10, sender: [2:7494331832439666097:2381], selfId: [2:7494331810964828387:2098], source: [2:7494331832439666095:2380] 2025-04-17T17:15:04.714154Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 54373db-3b97db29-f6f3167e-ea10e380, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzgxZDdlYzMtM2MxYzg5ZjItOTNjMTQ4N2UtMWY2ZTA5Mzc=, TxId: 2025-04-17T17:15:04.714177Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 54373db-3b97db29-f6f3167e-ea10e380, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzgxZDdlYzMtM2MxYzg5ZjItOTNjMTQ4N2UtMWY2ZTA5Mzc=, TxId: 2025-04-17T17:15:04.714250Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 54373db-3b97db29-f6f3167e-ea10e380, result part successfully saved 2025-04-17T17:15:04.714267Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 54373db-3b97db29-f6f3167e-ea10e380, reply SUCCESS, issues: 2025-04-17T17:15:04.714407Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 54373db-3b97db29-f6f3167e-ea10e380, Bootstrap. 
Database: /dc-1 2025-04-17T17:15:04.714682Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YzgxZDdlYzMtM2MxYzg5ZjItOTNjMTQ4N2UtMWY2ZTA5Mzc=, workerId: [2:7494331832439666095:2380], local sessions count: 1 2025-04-17T17:15:04.714770Z node 2 :KQP_PROXY DEBUG: Request has 18444999163604.836859s seconds to be completed 2025-04-17T17:15:04.716611Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZTYxODYyNzQtM2Q0MDEzNGQtMjM1YjhhNWYtNDU2NmIzZWQ=, workerId: [2:7494331832439666129:2391], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-17T17:15:04.717058Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:15:04.717247Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 54373db-3b97db29-f6f3167e-ea10e380, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-17T17:15:04.717517Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZTYxODYyNzQtM2Q0MDEzNGQtMjM1YjhhNWYtNDU2NmIzZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 12, targetId: [2:7494331832439666129:2391] 2025-04-17T17:15:04.717544Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 12 timeout: 300.000000s actor id: [2:7494331832439666131:2732] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD] Test command err: 2025-04-17T17:12:16.350713Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331112344673602:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:16.352468Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:12:21.680835Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001779/r3tmp/tmpbxRjqQ/pdisk_1.dat 2025-04-17T17:12:22.825874Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331112344673602:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:22.836673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:12:25.056212Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331129524542977:2289];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:25.056307Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:12:26.201295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:26.201678Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:26.576256Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:27.808516Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:27.808849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:27.969219Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:30.143193Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:30.868714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:12:30.869101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:12:30.902506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24030, node 1 2025-04-17T17:12:34.701484Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:34.702126Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:34.702138Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:35.049948Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6vg8/001779/r3tmp/yandexnc2jjc.tmp 2025-04-17T17:12:35.049968Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6vg8/001779/r3tmp/yandexnc2jjc.tmp 2025-04-17T17:12:35.051752Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6vg8/001779/r3tmp/yandexnc2jjc.tmp 2025-04-17T17:12:35.052281Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:12:35.324350Z INFO: TTestServer started on Port 1970 GrpcPort 24030 TClient is connected to server localhost:1970 PQClient connected to localhost:24030 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:12:39.645873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:12:39.694768Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:12:39.744653Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:12:39.753544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:12:44.917166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:12:44.917819Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:48.509858Z node 1 :KQP_PROXY ERROR: TraceId: "01js2bt2rb9ry1b3k4mz5xv1rc", Request deadline has expired for 1.717723s seconds 2025-04-17T17:12:48.524927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331249783627900:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:48.525038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:48.525581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331249783627912:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:48.529488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:12:48.549251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331249783627914:2365], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:12:48.633961Z node 1 :TX_PROXY ERROR: Actor# [1:7494331249783627978:2497] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:12:48.915635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:12:48.922501Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494331249783627986:2371], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:12:48.933127Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjJjNzk0OGYtMjlhMGMwODctM2Y0NjE1ZTItZWUzMzg3NDY=, ActorId: [1:7494331249783627897:2359], ActorState: ExecuteState, TraceId: 01js2bt93xfzfc2tjkagvm9hqw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:12:48.935335Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:12:48.998885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:12:49.122123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7494331254078595575:2677] === CheckClustersList. Ok 2025-04-17T17:12:54.344271Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-17T17:12:54.370551Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-17T17:12:54.371699Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494331275553432279:2796], Recipient [1:7494331155294346914:2189]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:54.371731Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:54.371741Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:12:54.371800Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7494331275553432275:2793], Recipient [1:7494331155294346914:2189]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 7205759404664448 ... 
rofiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-04-17T17:15:01.856634Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [6:7494331814502310279:2792], Partition 2, Sender [6:7494331814502310279:2792], Recipient [6:7494331814502310367:2803], Cookie: 0 2025-04-17T17:15:01.856670Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender [6:7494331814502310279:2792], Recipient [6:7494331814502310367:2803]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2025-04-17T17:15:01.856681Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2025-04-17T17:15:01.856725Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [6:7494331814502310370:2805], Recipient [6:7494331814502310283:2793]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-04-17T17:15:01.856746Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-04-17T17:15:01.856864Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [6:7494331780142570654:2465], Recipient [6:7494331780142570595:2462]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-04-17T17:15:01.856875Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-04-17T17:15:01.856970Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [6:7494331814502310367:2803], Recipient [6:7494331814502310279:2792]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-04-17T17:15:01.856983Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-04-17T17:15:01.857323Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 9 DataSize: 0 UsedReserveSize: 0 2025-04-17T17:15:01.857446Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] ProcessPendingStats. PendingUpdates size 3 2025-04-17T17:15:01.859290Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [6:7494331780142570594:2461], Recipient [6:7494331732897929196:2147]: NKikimrPQ.TEvPeriodicTopicStats PathId: 13 Generation: 1 Round: 9 DataSize: 0 UsedReserveSize: 0 SubDomainOutOfSpace: false 2025-04-17T17:15:01.859325Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-04-17T17:15:01.859342Z node 6 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046644480, LocalPathId: 13] DataSize 0 UsedReserveSize 0 2025-04-17T17:15:01.859364Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.099995s, queue# 1 2025-04-17T17:15:01.889630Z :INFO: [/Root] TraceId [] SessionId [producer-1|ea6db3aa-c6cd1826-6d678811-3e54abdc_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2025-04-17T17:15:01.889682Z :INFO: [/Root] TraceId [] SessionId [producer-1|ea6db3aa-c6cd1826-6d678811-3e54abdc_0] PartitionId [0] Generation [1] Write session will now close 2025-04-17T17:15:01.889744Z :DEBUG: [/Root] TraceId [] SessionId [producer-1|ea6db3aa-c6cd1826-6d678811-3e54abdc_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-17T17:15:01.890262Z :INFO: [/Root] TraceId [] SessionId [producer-1|ea6db3aa-c6cd1826-6d678811-3e54abdc_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-17T17:15:01.890300Z :DEBUG: [/Root] TraceId [] SessionId [producer-1|ea6db3aa-c6cd1826-6d678811-3e54abdc_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-17T17:15:01.908166Z node 6 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_6_1_3396814231714183018_v1 grpc read done: success# 0, data# { } 2025-04-17T17:15:01.908206Z node 6 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_6_1_3396814231714183018_v1 grpc read failed 2025-04-17T17:15:01.908236Z node 6 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_6_1_3396814231714183018_v1 grpc closed 2025-04-17T17:15:01.908281Z node 6 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_6_1_3396814231714183018_v1 is DEAD 2025-04-17T17:15:01.909078Z node 6 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: producer-1|ea6db3aa-c6cd1826-6d678811-3e54abdc_0 grpc read done: success: 0 data: 2025-04-17T17:15:01.909091Z node 6 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|ea6db3aa-c6cd1826-6d678811-3e54abdc_0 grpc read failed 2025-04-17T17:15:01.909114Z node 6 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|ea6db3aa-c6cd1826-6d678811-3e54abdc_0 grpc closed 2025-04-17T17:15:01.909126Z node 6 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|ea6db3aa-c6cd1826-6d678811-3e54abdc_0 is DEAD 2025-04-17T17:15:01.909762Z node 6 :PQ_PARTITION_CHOOSER TRACE: StateIdle, received event# 65543, Sender [6:7494331810207342819:2758], Recipient [6:7494331810207342822:2758]: NActors::TEvents::TEvPoison 2025-04-17T17:15:01.909826Z node 6 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T17:15:01.912068Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [6:7494331814502310441:3524], Recipient [6:7494331814502310279:2792]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:15:01.912138Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:15:01.912165Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:15:01.912190Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session test-consumer_6_1_3396814231714183018_v1 2025-04-17T17:15:01.912227Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [6:7494331814502310433:2824] destroyed 2025-04-17T17:15:01.912284Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [6:7494331814502310439:3522], Recipient [6:7494331814502310283:2793]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:15:01.912298Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:15:01.912313Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897] Handle 
TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:15:01.912327Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037897] Destroy direct read session test-consumer_6_1_3396814231714183018_v1 2025-04-17T17:15:01.912348Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037897] server disconnected, pipe [6:7494331814502310432:2823] destroyed 2025-04-17T17:15:01.912455Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_6_1_3396814231714183018_v1 2025-04-17T17:15:01.912477Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_6_1_3396814231714183018_v1 2025-04-17T17:15:01.912518Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [6:7494331810207342810:2751] disconnected; active server actors: 1 2025-04-17T17:15:01.912540Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [6:7494331810207342810:2751] client test-consumer disconnected session test-consumer_6_1_3396814231714183018_v1 2025-04-17T17:15:01.912656Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [6:7494331810207342861:3374], Recipient [6:7494331780142570595:2462]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:15:01.912671Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:15:01.912685Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:15:01.912708Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [6:7494331810207342859:2758] destroyed 2025-04-17T17:15:01.912750Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [6:7494331810207342815:3355], Recipient [6:7494331780142570595:2462]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:15:01.912765Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:15:01.912775Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:15:01.912788Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_6_1_3396814231714183018_v1 2025-04-17T17:15:01.912809Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [6:7494331810207342814:2756] destroyed 2025-04-17T17:15:01.912872Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188506 (NKikimr::TEvPQ::TEvPipeDisconnected), Tablet [6:7494331780142570595:2462], Partition 0, Sender [6:7494331780142570595:2462], Recipient [6:7494331780142570654:2465], Cookie: 0 2025-04-17T17:15:01.912924Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188506, Sender [6:7494331780142570595:2462], Recipient [6:7494331780142570654:2465]: NKikimr::TEvPQ::TEvPipeDisconnected 2025-04-17T17:15:01.912954Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPipeDisconnected 2025-04-17T17:15:01.912985Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-17T17:15:01.913022Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessChangeOwnerRequests. 2025-04-17T17:15:01.913057Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:01.913126Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:01.913155Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:01.913184Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:01.913217Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_6_1_3396814231714183018_v1 2025-04-17T17:15:01.960652Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435095, Sender [0:0:0], Recipient [6:7494331732897929196:2147]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-04-17T17:15:01.960706Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-04-17T17:15:01.960743Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-17T17:15:01.960755Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-17T17:15:01.960801Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2025-04-17T17:15:01.968390Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435095, Sender [0:0:0], Recipient [6:7494331732897929196:2147]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-04-17T17:15:01.968441Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-04-17T17:15:01.968467Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [GOOD] Test command err: 2025-04-17T17:13:09.097150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:13:09.097492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:13:09.097529Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001143/r3tmp/tmpCFaKyx/pdisk_1.dat 2025-04-17T17:13:09.482868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:13:09.530450Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:09.580092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:09.580240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:09.591796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:13:09.688516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:13:09.736761Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:13:09.737637Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:13:09.738017Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:13:09.738301Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:09.786535Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:13:09.787287Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:09.787402Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:13:09.791136Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:13:09.791240Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:13:09.791298Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:13:09.791650Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:13:09.791792Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:13:09.791862Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:13:09.805671Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:13:09.834633Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:13:09.834792Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:13:09.834892Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:13:09.834924Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:13:09.834954Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:13:09.834989Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:13:09.835347Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:09.835417Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:09.835680Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:13:09.835764Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:13:09.835865Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:13:09.835912Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:13:09.835954Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:13:09.835988Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:13:09.836020Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:13:09.836049Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:13:09.836087Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:13:09.836216Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:09.836250Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:09.836291Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:13:09.836624Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:13:09.836679Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:13:09.836784Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:13:09.837002Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:13:09.837055Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:13:09.837117Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:13:09.837164Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:13:09.837194Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:13:09.837222Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:13:09.837261Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:13:09.837511Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:13:09.837542Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:13:09.837575Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:13:09.837604Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:13:09.837643Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:13:09.837668Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:13:09.837701Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:13:09.837751Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:13:09.837782Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:13:09.839008Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:13:09.839054Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:13:09.852583Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:13:09.852663Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:13:09.852698Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:13:09.852732Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:13:09.852820Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:13:10.024560Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:10.024616Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:10.024649Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:13:10.025046Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:13:10.025085Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:13:10.025194Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:13:10.025235Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:13:10.025267Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:13:10.025298Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:13:10.033383Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:13:10.033455Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:13:10.034164Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:10.034204Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:10.034248Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:13:1 ... ed 0 immediate 0 planned 0 2025-04-17T17:15:04.727126Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:15:04.727195Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:15:04.727262Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:15:04.727548Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:885:2711], Recipient [15:885:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:04.727591Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:04.727650Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:15:04.727710Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:15:04.727749Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-17T17:15:04.727792Z node 15 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2025-04-17T17:15:04.727841Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit PlanQueue 2025-04-17T17:15:04.727877Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-17T17:15:04.727911Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit PlanQueue 2025-04-17T17:15:04.727947Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit LoadTxDetails 2025-04-17T17:15:04.727981Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit LoadTxDetails 2025-04-17T17:15:04.744274Z node 15 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2025-04-17T17:15:04.744411Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-17T17:15:04.744473Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit 
LoadTxDetails 2025-04-17T17:15:04.744532Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-17T17:15:04.744586Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-17T17:15:04.744663Z node 15 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically complete end at 72075186224037889 2025-04-17T17:15:04.744711Z node 15 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically incomplete end at 72075186224037889 2025-04-17T17:15:04.744765Z node 15 :TX_DATASHARD TRACE: Activated operation [3500:281474976715666] at 72075186224037889 2025-04-17T17:15:04.744823Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-17T17:15:04.744853Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-17T17:15:04.744884Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-04-17T17:15:04.744918Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CreateVolatileSnapshot 2025-04-17T17:15:04.745074Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is ExecutedNoMoreRestarts 2025-04-17T17:15:04.745108Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-04-17T17:15:04.745159Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-04-17T17:15:04.745209Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit DropVolatileSnapshot 2025-04-17T17:15:04.745240Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-17T17:15:04.745267Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-04-17T17:15:04.745296Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompleteOperation 2025-04-17T17:15:04.745325Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-04-17T17:15:04.745512Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is DelayComplete 2025-04-17T17:15:04.745552Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompleteOperation 2025-04-17T17:15:04.745595Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompletedOperations 2025-04-17T17:15:04.745644Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompletedOperations 2025-04-17T17:15:04.745683Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-17T17:15:04.745713Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompletedOperations 2025-04-17T17:15:04.745743Z node 15 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715666] at 72075186224037889 has finished 
2025-04-17T17:15:04.745793Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:15:04.745843Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-17T17:15:04.745892Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-17T17:15:04.745938Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-17T17:15:04.761106Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-04-17T17:15:04.761282Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:15:04.761376Z node 15 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037888 on unit CompleteOperation 2025-04-17T17:15:04.761506Z node 15 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1090:2867], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:15:04.761613Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:15:04.762036Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-04-17T17:15:04.762098Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T17:15:04.762130Z node 15 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-04-17T17:15:04.762175Z node 15 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [15:1090:2867], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:15:04.762214Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:15:04.764162Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:594:2519], Recipient [15:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 10 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-17T17:15:04.764389Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:15:04.764511Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-17T17:15:04.764658Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T17:15:04.764744Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-17T17:15:04.764819Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:15:04.764891Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:15:04.764944Z node 15 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-17T17:15:04.765017Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T17:15:04.765057Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:15:04.765087Z node 15 
:TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T17:15:04.765117Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:15:04.765305Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 10 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW } 2025-04-17T17:15:04.765842Z node 15 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 1011121314, counter# 18446744073709551615 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T17:15:04.765944Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715666 2025-04-17T17:15:04.766022Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:594:2519], 10} after executionsCount# 1 2025-04-17T17:15:04.766134Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 10} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:15:04.766407Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 10} finished in read 2025-04-17T17:15:04.766522Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T17:15:04.766557Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:15:04.766586Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:15:04.766618Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:15:04.766676Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T17:15:04.766702Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:15:04.766741Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-17T17:15:04.766815Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T17:15:04.767039Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 |76.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |76.7%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |76.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut >> LocalPartitionReader::Booting ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthEffectivePermissions [GOOD] Test command err: 2025-04-17T17:11:29.496903Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494330911613161358:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:29.497191Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6vg8/000a43/r3tmp/tmp2SrdlV/pdisk_1.dat 2025-04-17T17:11:30.535687Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:11:30.671018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:30.671135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:30.691989Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:30.699206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9293, node 1 2025-04-17T17:11:31.414106Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:31.416722Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:31.417385Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:31.421599Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27339 TClient is connected to server localhost:27339 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:11:32.152331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:11:34.496477Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494330911613161358:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:34.496544Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:11:40.402666Z node 1 :KQP_PROXY ERROR: TraceId: "01js2bqypdd6bctbrbb622dy2y", Request deadline has expired for 3.102253s seconds (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. 
TCreateSessionResult::GetSession 2025-04-17T17:11:44.366201Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494330975038127046:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:44.366282Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/000a43/r3tmp/tmpIto8WN/pdisk_1.dat 2025-04-17T17:11:47.727993Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:11:48.180369Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:11:48.276314Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:48.276390Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:48.280234Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12373, node 6 2025-04-17T17:11:49.376155Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494330975038127046:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:49.376252Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:11:51.895799Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:51.932637Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:51.932653Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:51.932829Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28749 TClient is connected to server localhost:28749 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:11:57.907519Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
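The TContractViolation recorded above comes from using a TCreateSessionResult without first checking its status: calling GetSession() on a result whose status is not success (here the request deadline had already expired) throws NYdb::Dev::TContractViolation. A minimal sketch of the checked pattern, assuming the standard NYdb C++ SDK API rather than the test's actual code:

    #include <iostream>
    // SDK include path is an assumption; it varies between SDK layouts:
    // #include <ydb-cpp-sdk/client/table/table.h>

    void CheckedCreateSession(NYdb::NTable::TTableClient& client) {
        // Wait for the async result, then inspect the status before touching the session.
        auto result = client.CreateSession().ExtractValueSync();
        if (!result.IsSuccess()) {
            // Calling result.GetSession() here is exactly what raises TContractViolation.
            std::cerr << "CreateSession failed: " << result.GetIssues().ToString().c_str() << "\n";
            return;
        }
        auto session = result.GetSession();  // safe: status verified first
    }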
2025-04-17T17:12:03.084192Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:12:03.084221Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:03.500551Z node 6 :KQP_PROXY ERROR: TraceId: "01js2brrbw9ztj0z0qdvpapcp3", Request deadline has expired for 0.021421s seconds (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession 2025-04-17T17:12:28.031355Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7494331157373871735:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:28.032057Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/000a43/r3tmp/tmp4lkkFz/pdisk_1.dat 2025-04-17T17:12:31.017712Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:32.049649Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:33.008621Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7494331157373871735:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:33.009266Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:12:33.208863Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:38.279071Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:12:38.279158Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:12:38.351818Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:12:39.288622Z node 11 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.164765s 2025-04-17T17:12:39.288738Z node 11 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.183799s TServer::EnableGrpc on GrpcPort 22624, node 11 2025-04-17T17:12:42.087335Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:42.156385Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7494331187438643326:2301];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:42.157988Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:12:42.301398Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:42.558270Z node 11 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:12:42.558290Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:12:42.558298Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:12:42.558441Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:12:43.989205Z node 11 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.104732s 2025-04-17T17:12:43.989236Z node 11 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.104774s TClient is connected to server localhost:27071 TClient is connected to server localhost:27071 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 Schemeshard ... [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:14:52.896430Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools/default TableId: [72057594046644480:8:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindResourcePool DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:14:52.896624Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494331781612796757:2395], row count: 5, finished: 0 2025-04-17T17:14:52.898608Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:14:52.903157Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-17T17:14:52.903236Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494331781612796757:2395], row count: 1, finished: 0 2025-04-17T17:14:52.908161Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false 
Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:14:52.910512Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: [72057594046644480:4:1] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:14:52.910577Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494331781612796757:2395], row count: 1, finished: 0 2025-04-17T17:14:52.910704Z node 41 :SYSTEM_VIEWS INFO: Scan finished, actor: [41:7494331781612796757:2395], owner: [41:7494331781612796754:2393], scan id: 0, table id: [72057594046644480:1:0:auth_effective_permissions] 2025-04-17T17:14:52.912401Z node 41 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [41:7494331730073187301:2144], database# , query hash# 11342553055430868283, cpu time# 384193 2025-04-17T17:14:52.913087Z node 41 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910092808, txId: 281474976710676] shutting down 2025-04-17T17:14:53.060324Z node 43 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded , top size by CPU # 0, top size by TLI # 0, time# 2025-04-17T17:14:53.060144Z 2025-04-17T17:14:53.082816Z node 41 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01js2by2mjdk166fpahe42d37b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=41&id=YmNiMGU0ZjYtOWRjYjlhNjItYmFlNDg1ODMtZjI5OWI0Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:14:53.085864Z node 41 :SYSTEM_VIEWS INFO: Scan started, actor: [41:7494331785907764110:2405], owner: [41:7494331785907764107:2403], scan id: 0, table id: [72075186224037888:1:0:auth_effective_permissions] 2025-04-17T17:14:53.100631Z node 41 :SYSTEM_VIEWS INFO: Scan prepared, actor: [41:7494331785907764110:2405], schemeshard id: 72075186224037888, hive id: 72057594037968897, database: /Root/Tenant1, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], database node count: 2 2025-04-17T17:14:53.100677Z node 41 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root/Tenant1 tenant owner: root@builtin subject sid: empty require admin access: 0 is admin: 1 2025-04-17T17:14:53.100770Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:14:53.101254Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1 TableId: [72075186224037888:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 }] Groups: [] } Children [Dir2,Table1] }] } 2025-04-17T17:14:53.101311Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494331785907764110:2405], row count: 1, finished: 0 2025-04-17T17:14:53.108246Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Dir2 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:14:53.116348Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Dir2 TableId: [72075186224037888:3:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-17T17:14:53.116444Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494331785907764110:2405], row count: 2, finished: 0 2025-04-17T17:14:53.120242Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
Root/Tenant1/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:14:53.123021Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Table1 TableId: [72075186224037888:2:1] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:14:53.123089Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494331785907764110:2405], row count: 1, finished: 0 2025-04-17T17:14:53.123459Z node 41 :SYSTEM_VIEWS INFO: Scan finished, actor: [41:7494331785907764110:2405], owner: [41:7494331785907764107:2403], scan id: 0, table id: [72075186224037888:1:0:auth_effective_permissions] 2025-04-17T17:14:53.126009Z node 41 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [41:7494331730073187301:2144], database# , query hash# 17325808444334437222, cpu time# 159054 2025-04-17T17:14:53.126653Z node 41 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910093078, txId: 281474976710678] shutting down 2025-04-17T17:14:53.161660Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 43 2025-04-17T17:14:53.162606Z node 41 :HIVE WARN: HIVE#72057594037968897 Node(43, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:14:53.161823Z node 43 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:14:53.164253Z node 45 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:14:53.164727Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 44 2025-04-17T17:14:53.165029Z node 41 :HIVE WARN: HIVE#72057594037968897 Node(44, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:14:53.165144Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 42 2025-04-17T17:14:53.165949Z node 41 :HIVE WARN: HIVE#72057594037968897 Node(42, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:14:53.166052Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 45 2025-04-17T17:14:53.166619Z node 41 :HIVE WARN: HIVE#72057594037968897 Node(45, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:14:53.173429Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494331740253612127:2107], Type=268959746 2025-04-17T17:14:53.173478Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494331740253612127:2107], Type=268959746 2025-04-17T17:14:53.173502Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494331740253612127:2107], Type=268959746 2025-04-17T17:14:53.173526Z node 41 :HIVE WARN: HIVE#72057594037968897 
THive::Handle::TEvUndelivered Sender=[42:7494331740253612127:2107], Type=268959746 2025-04-17T17:14:53.173553Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494331740253612127:2107], Type=268959746 2025-04-17T17:14:53.173586Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494331740253612127:2107], Type=268959746 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD] >> LocalPartitionReader::Booting [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] Test command err: 2025-04-17T17:14:50.100571Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331773056238673:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:50.100635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0021a8/r3tmp/tmpmvxyxI/pdisk_1.dat 2025-04-17T17:14:50.332645Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494331773186718869:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:50.618253Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:14:51.130201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:51.130271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:51.131564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:51.131612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:51.133040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:14:51.139860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:14:51.141548Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:14:51.150485Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:51.151570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17201 2025-04-17T17:14:55.108377Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331773056238673:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:55.108457Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:14:55.214672Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:14:55.216156Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:14:55.217000Z node 1 :KQP_PROXY DEBUG: 
Subscribed for config changes. 2025-04-17T17:14:55.217031Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-17T17:14:55.217052Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:14:55.217125Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:14:55.217192Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:14:55.217828Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:14:55.217862Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:14:55.217883Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:14:55.276084Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494331773186718869:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:55.276184Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:14:55.294197Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:14:55.297809Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:14:55.308139Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=NTk4NWUxMDctMjUwZDMxMjQtNmQzZDM4YzctNjAzNTNjZmU=, workerId: [2:7494331794661555492:2309], database: , longSession: 1, local sessions count: 1 2025-04-17T17:14:55.308208Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:14:55.308513Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:14:55.309390Z node 2 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-17T17:14:55.309424Z node 2 :KQP_PROXY DEBUG: Updated table service config. 2025-04-17T17:14:55.309442Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:14:55.309478Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:14:55.309582Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:14:55.309638Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:14:55.309667Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:14:55.309776Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:14:55.309792Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:14:55.316482Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NTk4NWUxMDctMjUwZDMxMjQtNmQzZDM4YzctNjAzNTNjZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 2, targetId: [2:8678280833929343339:121] 2025-04-17T17:14:55.316546Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 600.000000s actor id: [1:7494331794531075981:2483] 2025-04-17T17:14:55.319895Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NTk4NWUxMDctMjUwZDMxMjQtNmQzZDM4YzctNjAzNTNjZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [2:7494331794661555492:2309] 2025-04-17T17:14:55.319938Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [2:7494331794661555508:2120] 2025-04-17T17:14:55.334066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331794531075982:2315], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:55.334251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:55.356803Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494331794661555509:2310], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:55.356904Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:55.628441Z node 2 :KQP_PROXY DEBUG: TraceId: "01js2by4yr74akjqbpz0zpzfxy", Created new session, sessionId: ydb://session/3?node_id=2&id=NjhhMzI4NmMtYzM3NGQzYjgtOTRjMzE2NmYtY2U3OWMwNzc=, workerId: [2:7494331794661555520:2312], database: , longSession: 0, local sessions count: 2 2025-04-17T17:14:55.628718Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: 01js2by4yr74akjqbpz0zpzfxy, Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NjhhMzI4NmMtYzM3NGQzYjgtOTRjMzE2NmYtY2U3OWMwNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 4, targetId: [2:7494331794661555520:2312] 2025-04-17T17:14:55.628756Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 4 timeout: 300.000000s actor id: [2:7494331794661555521:2123] 2025-04-17T17:14:55.629332Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494331794661555522:2313], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:55.629407Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:55.629666Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494331794661555527:2316], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:55.662007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-04-17T17:14:55.706005Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494331794661555529:2317], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710657 completed, doublechecking } 2025-04-17T17:14:55.945457Z node 2 :TX_PROXY ERROR: Actor# [2:7494331794661555559:2134] txid# 281474976710658, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:56.003209Z node 2 :KQP_PROXY DEBUG: TraceId: "01js2by4yr74akjqbpz0zpzfxy", Forwarded response to sender actor, requestId: 4, sender: [2:7494331794661555519:2311], selfId: [2:7494331773186718716:2078], source: [2:7494331794661555520:2312] 2025-04-17T17:14:56.004276Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NjhhMzI4NmMtYzM3NGQzYjgtOTRjMzE2NmYtY2U3OWMwNzc=, workerId: [2:7494331794661555520:2312], local sessions count: 1 2025-04-17T17:14:56.009911Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7494331773056238931:2281], selfId: [2:7494331773186718716:2078], source: [2:7494331794661555492:2309] 2025-04-17T17:14:56.012327Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [1:7494331777351206699:2450], selfId: [1:7494331773056238931:2281], source: [2:7494331773186718716:2078] 2025-04-17T17:14:58.114621Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494331808671872353:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:58.115089Z node ... 7:15:03.705261Z node 3 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976715661. Doublechecking... 2025-04-17T17:15:03.755985Z node 3 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-17T17:15:03.846953Z node 3 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976715660. Doublechecking... 2025-04-17T17:15:03.870692Z node 3 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976715659. Doublechecking... 2025-04-17T17:15:03.948269Z node 3 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-17T17:15:03.954916Z node 3 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-17T17:15:03.957173Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 1163bf2e-b675b0d9-53241384-3ed8e03e, Bootstrap. 
Database: /Root 2025-04-17T17:15:03.957395Z node 3 :KQP_PROXY DEBUG: Request has 18444999163605.594244s seconds to be completed 2025-04-17T17:15:03.960200Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=N2JlOTNhMmItNTZkYjkyYjItMjAwMjkxMDEtOTA5NWFmMDg=, workerId: [3:7494331830146710266:2360], database: /Root, longSession: 1, local sessions count: 1 2025-04-17T17:15:03.960428Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:15:03.961758Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 1163bf2e-b675b0d9-53241384-3ed8e03e, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-17T17:15:03.962459Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=N2JlOTNhMmItNTZkYjkyYjItMjAwMjkxMDEtOTA5NWFmMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 5, targetId: [3:7494331830146710266:2360] 2025-04-17T17:15:03.962497Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 300.000000s actor id: [3:7494331830146710268:2955] 2025-04-17T17:15:03.963012Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331830146710269:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:15:03.963092Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:15:03.964245Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331830146710274:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:15:03.972722Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:2, at schemeshard: 72057594046644480 2025-04-17T17:15:04.001247Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494331830146710276:2366], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-17T17:15:04.066790Z node 3 :TX_PROXY ERROR: Actor# [3:7494331834441677639:3011] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:15:04.259416Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:15:04.452076Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [3:7494331830146710267:2361], selfId: [3:7494331808671872500:2281], source: [3:7494331830146710266:2360] 2025-04-17T17:15:04.452394Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 1163bf2e-b675b0d9-53241384-3ed8e03e, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=N2JlOTNhMmItNTZkYjkyYjItMjAwMjkxMDEtOTA5NWFmMDg=, TxId: 2025-04-17T17:15:04.452455Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 1163bf2e-b675b0d9-53241384-3ed8e03e, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=N2JlOTNhMmItNTZkYjkyYjItMjAwMjkxMDEtOTA5NWFmMDg=, TxId: 2025-04-17T17:15:04.452473Z node 3 :KQP_PROXY DEBUG: [ScriptExecutions] Create script execution operation. ExecutionId: 1163bf2e-b675b0d9-53241384-3ed8e03e. Result: SUCCESS. Issues: 2025-04-17T17:15:04.455250Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=ZTFlMzdhYmYtNWUwMTAzNjEtN2U4NjQ3ZDgtYmVjYmQzOGY=, workerId: [3:7494331834441677721:2381], database: /Root, longSession: 1, local sessions count: 2 2025-04-17T17:15:04.455486Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:15:04.455995Z node 3 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=3&id=N2JlOTNhMmItNTZkYjkyYjItMjAwMjkxMDEtOTA5NWFmMDg=, workerId: [3:7494331830146710266:2360], local sessions count: 1 2025-04-17T17:15:04.456745Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: 01js2bycvsbqv1j0eyqcaxqnm7, Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=ZTFlMzdhYmYtNWUwMTAzNjEtN2U4NjQ3ZDgtYmVjYmQzOGY=, CurrentExecutionId: 1163bf2e-b675b0d9-53241384-3ed8e03e, CustomerSuppliedId: 01js2bycvsbqv1j0eyqcaxqnm7, PoolId: }. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 7, targetId: [3:7494331834441677721:2381] 2025-04-17T17:15:04.456791Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 7 timeout: 604800.000000s actor id: [3:7494331834441677723:3061] 2025-04-17T17:15:04.487402Z node 3 :KQP_PROXY DEBUG: TraceId: "01js2bydx71yg74p30x4a8yyf6", Request has 18444999163605.064245s seconds to be completed 2025-04-17T17:15:04.489661Z node 3 :KQP_PROXY DEBUG: TraceId: "01js2bydx71yg74p30x4a8yyf6", Created new session, sessionId: ydb://session/3?node_id=3&id=YzZjMzY0YjYtYWI2ZTVjZGYtOWFjZDc0MTAtMTUwMGJkMDM=, workerId: [3:7494331834441677737:2387], database: /Root, longSession: 1, local sessions count: 2 2025-04-17T17:15:04.489837Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 01js2bydx71yg74p30x4a8yyf6 2025-04-17T17:15:04.504149Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: 01js2bydxq4sq0b5xxwk3396v0, Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=YzZjMzY0YjYtYWI2ZTVjZGYtOWFjZDc0MTAtMTUwMGJkMDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 9, targetId: [3:7494331834441677737:2387] 2025-04-17T17:15:04.504199Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 9 timeout: 300.000000s actor id: [3:7494331834441677742:3071] 2025-04-17T17:15:04.544711Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 1163bf2e-b675b0d9-53241384-3ed8e03e, Bootstrap. Database: /Root 2025-04-17T17:15:04.549461Z node 3 :KQP_PROXY DEBUG: Request has 18444999163605.002182s seconds to be completed 2025-04-17T17:15:04.550879Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7494331834441677756:3078], for# user@builtin, access# DescribeSchema 2025-04-17T17:15:04.550924Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7494331834441677756:3078], for# user@builtin, access# DescribeSchema 2025-04-17T17:15:04.551722Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=MTY1ODUzNzktN2ViYTZiNDItNDBlNDQ0NDUtZmFjNTVjOTk=, workerId: [3:7494331834441677757:2392], database: /Root, longSession: 1, local sessions count: 3 2025-04-17T17:15:04.551888Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:15:04.551976Z node 3 :KQP_PROXY DEBUG: TraceId: "01js2bycvsbqv1j0eyqcaxqnm7", Forwarded response to sender actor, requestId: 7, sender: [3:7494331830146710263:2953], selfId: [3:7494331808671872500:2281], source: [3:7494331834441677721:2381] 2025-04-17T17:15:04.552266Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 1163bf2e-b675b0d9-53241384-3ed8e03e, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2025-04-17T17:15:04.553168Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=MTY1ODUzNzktN2ViYTZiNDItNDBlNDQ0NDUtZmFjNTVjOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 11, targetId: [3:7494331834441677757:2392] 2025-04-17T17:15:04.553211Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 11 timeout: 300.000000s actor id: [3:7494331834441677759:3079] 2025-04-17T17:15:04.577842Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7494331834441677743:2389], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiReadTable!
<main>:1:1: Error: Cannot find table 'db.[/Root/.metadata/script_executions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:15:04.579633Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YzZjMzY0YjYtYWI2ZTVjZGYtOWFjZDc0MTAtMTUwMGJkMDM=, ActorId: [3:7494331834441677737:2387], ActorState: ExecuteState, TraceId: 01js2bydxq4sq0b5xxwk3396v0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:15:04.579924Z node 3 :KQP_PROXY DEBUG: TraceId: "01js2bydxq4sq0b5xxwk3396v0", Forwarded response to sender actor, requestId: 9, sender: [3:7494331834441677741:2388], selfId: [3:7494331808671872500:2281], source: [3:7494331834441677737:2387] |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting [GOOD] >> TGRpcStreamingTest::ReadFinish [GOOD] |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] Test command err: 2025-04-17T17:15:04.906006Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331831223733113:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:15:04.906053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002671/r3tmp/tmpoTNdaw/pdisk_1.dat 2025-04-17T17:15:05.437596Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:15:05.455508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:15:05.455625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:15:05.457862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:15:05.621256Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] stream accepted Name# Session ok# true peer# ipv6:[::1]:37672 2025-04-17T17:15:05.621618Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] facade attach Name# Session actor# [1:7494331835518700812:2257] peer# ipv6:[::1]:37672 2025-04-17T17:15:05.621642Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] facade read Name# Session peer# ipv6:[::1]:37672 2025-04-17T17:15:05.621902Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] read finished Name# Session ok# false data# peer# ipv6:[::1]:37672 2025-04-17T17:15:05.621931Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 2025-04-17T17:15:05.621970Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] facade finish Name# Session peer# ipv6:[::1]:37672 grpc status# (9) 
message# Everything is A-OK 2025-04-17T17:15:05.624942Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] stream done notification Name# Session ok# true peer# unknown 2025-04-17T17:15:05.624985Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] stream finished Name# Session ok# true peer# unknown grpc status# (9) message# Everything is A-OK 2025-04-17T17:15:05.625011Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] deregistering request Name# Session peer# unknown (finish done) 2025-04-17T17:15:05.625152Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone >> LocalPartitionReader::FeedSlowly |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::FeedSlowly [GOOD] >> LocalPartitionReader::Simple >> TestKinesisHttpProxy::TestRequestWithWrongRegion [GOOD] >> LocalPartitionReader::Simple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD] Test command err: 2025-04-17T17:13:10.301909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:13:10.302402Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:13:10.302462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00110d/r3tmp/tmpr6jTwX/pdisk_1.dat 2025-04-17T17:13:10.875781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:13:10.984443Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:13:11.057740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:13:11.057871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:13:11.087969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:13:11.232225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:13:11.363366Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:13:11.373076Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:13:11.378283Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:13:11.381997Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:13:11.462461Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:13:11.463217Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:13:11.463322Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:13:11.465236Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:13:11.465310Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:13:11.465361Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:13:11.465750Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:13:11.466038Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:13:11.466435Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:13:11.478143Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:13:11.519885Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:13:11.520100Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:13:11.520261Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:13:11.520302Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:13:11.520336Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:13:11.520377Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:13:11.520608Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:11.520661Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:11.520937Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:13:11.521025Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:13:11.521170Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:13:11.521231Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:13:11.521296Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:13:11.521332Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:13:11.521379Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:13:11.521414Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:13:11.521457Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:13:11.521594Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:11.521631Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:11.521678Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:13:11.522025Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:13:11.522094Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:13:11.522205Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:13:11.522426Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:13:11.522488Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:13:11.522567Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:13:11.522616Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:13:11.522653Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:13:11.522685Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
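Around this point the datashard walks tx 281474976715657 through a fixed chain of execution units: CheckSchemeTx validates the proposal, StoreSchemeTx persists it, FinishPropose replies PREPARED to the proposer, and WaitForPlan parks the operation until the coordinator's plan step arrives (the trace continues below). A toy model of that dispatch loop, with unit names taken from the trace; an illustration only, not YDB's actual TPipeline code:

    #include <iostream>
    #include <string>
    #include <vector>

    enum class EExecutionStatus { Executed, NotReady };

    int main() {
        // Unit order for the scheme tx as traced above and below.
        const std::vector<std::string> units = {
            "CheckSchemeTx", "StoreSchemeTx", "FinishPropose", "WaitForPlan"};
        for (const std::string& unit : units) {
            // WaitForPlan stays "not ready" until a plan step is received,
            // so the operation remains parked on that unit.
            const EExecutionStatus st = (unit == "WaitForPlan")
                ? EExecutionStatus::NotReady
                : EExecutionStatus::Executed;
            std::cout << "unit " << unit
                      << (st == EExecutionStatus::NotReady
                              ? " -> not ready (wait for plan step)"
                              : " -> executed")
                      << "\n";
            if (st == EExecutionStatus::NotReady) {
                break;  // resumes when TEvTxProcessing::TEvPlanStep arrives
            }
        }
        return 0;
    }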
2025-04-17T17:13:11.522715Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:13:11.523002Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:13:11.523040Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:13:11.523070Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:13:11.523111Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:13:11.523163Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:13:11.523190Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:13:11.523226Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:13:11.523263Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:13:11.523303Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:13:11.537068Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:13:11.537143Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:13:11.547915Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:13:11.548001Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:13:11.548041Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:13:11.548079Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:13:11.548219Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:13:11.717085Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:11.717144Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:13:11.717183Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:13:11.717607Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:13:11.717639Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:13:11.717773Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:13:11.717817Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:13:11.717854Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:13:11.717894Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:13:11.723963Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:13:11.724048Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:13:11.724875Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:11.724921Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:13:11.724973Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:13:1 ... planned 0 immediate 0 planned 0 2025-04-17T17:15:06.565213Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:15:06.565287Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:15:06.565347Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:15:06.565698Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:885:2711], Recipient [15:885:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:06.565750Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:06.565817Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:15:06.565863Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:15:06.565905Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-17T17:15:06.565952Z node 15 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2025-04-17T17:15:06.565995Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit PlanQueue 2025-04-17T17:15:06.566036Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-17T17:15:06.566070Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit PlanQueue 2025-04-17T17:15:06.566111Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit LoadTxDetails 2025-04-17T17:15:06.566149Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit LoadTxDetails 2025-04-17T17:15:06.566349Z node 15 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2025-04-17T17:15:06.566416Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-17T17:15:06.566451Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit 
LoadTxDetails 2025-04-17T17:15:06.566484Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-17T17:15:06.566520Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-17T17:15:06.566568Z node 15 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically complete end at 72075186224037889 2025-04-17T17:15:06.566611Z node 15 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically incomplete end at 72075186224037889 2025-04-17T17:15:06.566657Z node 15 :TX_DATASHARD TRACE: Activated operation [3500:281474976715666] at 72075186224037889 2025-04-17T17:15:06.566709Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-17T17:15:06.566735Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-17T17:15:06.566759Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-04-17T17:15:06.566785Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CreateVolatileSnapshot 2025-04-17T17:15:06.566906Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is ExecutedNoMoreRestarts 2025-04-17T17:15:06.566937Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-04-17T17:15:06.566989Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-04-17T17:15:06.567039Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit DropVolatileSnapshot 2025-04-17T17:15:06.567069Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-17T17:15:06.567097Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-04-17T17:15:06.567123Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompleteOperation 2025-04-17T17:15:06.567152Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-04-17T17:15:06.567306Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is DelayComplete 2025-04-17T17:15:06.567348Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompleteOperation 2025-04-17T17:15:06.567408Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompletedOperations 2025-04-17T17:15:06.567462Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompletedOperations 2025-04-17T17:15:06.567499Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-17T17:15:06.567526Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompletedOperations 2025-04-17T17:15:06.567556Z node 15 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715666] at 72075186224037889 has finished 
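The planned operation [3500:281474976715666] above passed through CreateVolatileSnapshot and DropVolatileSnapshot, and the reads that follow resolve the version as Snapshot { Step: 3500 TxId: 281474976715666 }. A rough sketch of such bookkeeping keyed by (step, txId) follows; the type and method names are hypothetical stand-ins, and the real logic in ydb/core/tx/datashard is considerably more involved.

    // Illustrative model of volatile-snapshot bookkeeping, keyed by (Step, TxId).
    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <utility>

    using TSnapshotKey = std::pair<uint64_t /*Step*/, uint64_t /*TxId*/>;

    struct TSnapshotRegistry {
        std::map<TSnapshotKey, size_t> Refs;  // snapshot version -> reader count

        void Create(uint64_t step, uint64_t txId) { Refs[{step, txId}]; }

        bool Acquire(uint64_t step, uint64_t txId) {  // TEvRead with Snapshot{...}
            auto it = Refs.find({step, txId});
            if (it == Refs.end()) return false;       // reader must retry or abort
            ++it->second;
            return true;
        }

        void Release(uint64_t step, uint64_t txId) {
            auto it = Refs.find({step, txId});
            if (it != Refs.end() && it->second > 0) --it->second;
        }

        void Drop(uint64_t step, uint64_t txId) {     // DropVolatileSnapshot
            auto it = Refs.find({step, txId});
            if (it != Refs.end() && it->second == 0) Refs.erase(it);
        }
    };

    int main() {
        TSnapshotRegistry reg;
        reg.Create(3500, 281474976715666);               // CreateVolatileSnapshot
        std::cout << reg.Acquire(3500, 281474976715666)  // later read at v3500
                  << "\n";                               // prints 1
    }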
2025-04-17T17:15:06.567599Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:15:06.567638Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-17T17:15:06.567702Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-17T17:15:06.567742Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-17T17:15:06.588962Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-04-17T17:15:06.589124Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:15:06.589202Z node 15 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037888 on unit CompleteOperation 2025-04-17T17:15:06.589313Z node 15 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1090:2867], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:15:06.589416Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:15:06.589950Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-04-17T17:15:06.592546Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T17:15:06.592639Z node 15 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-04-17T17:15:06.592710Z node 15 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [15:1090:2867], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:15:06.592783Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:15:06.606797Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:594:2519], Recipient [15:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-17T17:15:06.607160Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:15:06.607298Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-17T17:15:06.607451Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T17:15:06.607530Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-17T17:15:06.607606Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:15:06.607691Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:15:06.607742Z node 15 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-17T17:15:06.607812Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T17:15:06.607848Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:15:06.607875Z node 15 
:TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T17:15:06.607904Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:15:06.608094Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW } 2025-04-17T17:15:06.608676Z node 15 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 1011121314, counter# 18446744073709551615 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T17:15:06.608786Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715666 2025-04-17T17:15:06.608886Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:594:2519], 3} after executionsCount# 1 2025-04-17T17:15:06.608994Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 3} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:15:06.609315Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 3} finished in read 2025-04-17T17:15:06.609450Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T17:15:06.609490Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:15:06.609522Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:15:06.609555Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:15:06.609613Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T17:15:06.609640Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:15:06.609683Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-17T17:15:06.609760Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T17:15:06.610002Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ReadFinish [GOOD] Test command err: 2025-04-17T17:15:05.622758Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331838633568036:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:15:05.625642Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00266a/r3tmp/tmpuxFQXF/pdisk_1.dat 2025-04-17T17:15:06.209236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:15:06.209315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:15:06.210575Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:15:06.224322Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:15:06.415405Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session ok# true peer# ipv6:[::1]:36376 2025-04-17T17:15:06.415814Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7494331842928535788:2258] peer# ipv6:[::1]:36376 2025-04-17T17:15:06.415835Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade read Name# Session peer# ipv6:[::1]:36376 2025-04-17T17:15:06.415885Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade finish Name# Session peer# ipv6:[::1]:36376 grpc status# (0) message# 2025-04-17T17:15:06.416362Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] read finished Name# Session ok# false data# peer# ipv6:[::1]:36376 2025-04-17T17:15:06.416406Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# Session ok# true peer# ipv6:[::1]:36376 2025-04-17T17:15:06.416466Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# true peer# ipv6:[::1]:36376 grpc status# (0) message# 2025-04-17T17:15:06.416506Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# ipv6:[::1]:36376 (finish done) 2025-04-17T17:15:06.416722Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> KqpProxy::NodeDisconnectedTest [GOOD] >> TVPatchTests::FindingPartsWhenPartsAreDontExist >> TopicAutoscaling::PartitionSplit_AutosplitByLoad [GOOD] >> TopicAutoscaling::PartitionSplit_AutosplitByLoad_AfterAlter |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::FeedSlowly [GOOD] >> DataShardReadTableSnapshots::ReadTableDropColumn >> TestKinesisHttpProxy::TestRequestWithIAM |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple [GOOD] >> TVPatchTests::FullPatchTest [GOOD] >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] >> TVPatchTests::FindingPartsWhenPartsAreDontExist [GOOD] >> TVPatchTests::FindingPartsWhenOnlyOnePartExists |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder >> YdbProxy::OAuthToken [GOOD] |76.8%| [TA] $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} |76.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |76.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] |76.8%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... 
results_accumulator.log} |76.8%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose >> DataShardReadTableSnapshots::ReadTableSnapshot >> TSubscriberTest::SyncWithOutdatedReplica >> TConsoleTests::TestAuthorization [GOOD] >> TConsoleTests::TestAuthorizationExtSubdomain >> YdbProxy::CopyTables [GOOD] >> YdbProxy::AlterTopic >> TFlatTableExecutor_ColumnGroups::TestManyRows [GOOD] >> TFlatTableExecutor_CompactionScan::TestCompactionScan >> DataShardReadTableSnapshots::ReadTableSplitBefore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] Test command err: Recv 65537 2025-04-17T17:15:10.862144Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-17T17:15:10.863909Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-17T17:15:10.863989Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-04-17T17:15:10.864101Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2025-04-17T17:15:11.161365Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-17T17:15:11.161643Z node 2 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-17T17:15:11.161704Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-17T17:15:11.161892Z node 2 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2025-04-17T17:15:11.161975Z node 2 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-04-17T17:15:11.162041Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::NodeDisconnectedTest [GOOD] Test command err: 2025-04-17T17:14:52.192884Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331780187233853:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:52.193003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002187/r3tmp/tmpeFWQvV/pdisk_1.dat 2025-04-17T17:14:53.170938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:53.171023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:53.182143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:14:53.196476Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:14:53.200631Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:23159 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T17:14:53.759248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:53.797959Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:14:56.693723Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:14:56.719298Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:14:56.723896Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=ZDZkOWQwZmEtYjBhOGVmNmQtYzE1NjcyMDAtMzIzNDM1ZmU=, workerId: [1:7494331797367103504:2311], database: , longSession: 0, local sessions count: 1 2025-04-17T17:14:56.723928Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:14:56.732327Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=1&id=ZDZkOWQwZmEtYjBhOGVmNmQtYzE1NjcyMDAtMzIzNDM1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.010000s timeout: 0.010000s cancelAfter: 0.000000s. 
Send request to target, requestId: 2, targetId: [1:7494331797367103504:2311] 2025-04-17T17:14:56.733239Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 0.010000s actor id: [0:0:0] 2025-04-17T17:14:56.733490Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDZkOWQwZmEtYjBhOGVmNmQtYzE1NjcyMDAtMzIzNDM1ZmU=, ActorId: [1:7494331797367103504:2311], ActorState: ReadyState, Reply query error, msg:
: Error: SomeUniqTextForUt proxyRequestId: 2 2025-04-17T17:14:56.733562Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-17T17:14:56.733586Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-17T17:14:56.733602Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:14:56.733645Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:14:56.733757Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:14:56.733805Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:14:56.733821Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:14:56.734335Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [1:7494331784482201577:2287], selfId: [1:7494331780187233948:2276], source: [1:7494331797367103504:2311] 2025-04-17T17:14:56.734356Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:14:56.734635Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:14:56.739317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331797367103517:2312], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:56.739480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:56.744646Z node 1 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(2) 2025-04-17T17:14:56.744688Z node 1 :KQP_PROXY DEBUG: Invalid request info while on request timeout handle. RequestId: 2 2025-04-17T17:15:04.771804Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:15:04.772771Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:15:04.772923Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:15:04.775237Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:15:04.775712Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:15:04.775772Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002187/r3tmp/tmpDhx5Bd/pdisk_1.dat 2025-04-17T17:15:05.171883Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29475 KQP PROXY1 [2:8678280833929343339:121] KQP PROXY2 [3:8678280833929343339:121] SENDER [2:1141:2687] 2025-04-17T17:15:05.576203Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=ODg1MWQxODItNWY3MGRlNWMtNzZjNzdmOTMtNTQ5NjY5MWY=, workerId: [3:1142:2375], database: , longSession: 1, local sessions count: 1 2025-04-17T17:15:05.576564Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=ODg1MWQxODItNWY3MGRlNWMtNzZjNzdmOTMtNTQ5NjY5MWY= 2025-04-17T17:15:05.577452Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=ODg1MWQxODItNWY3MGRlNWMtNzZjNzdmOTMtNTQ5NjY5MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [3:8678280833929343339:121] 2025-04-17T17:15:05.577512Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 0.001000s actor id: [0:0:0] 2025-04-17T17:15:05.577865Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=ODg1MWQxODItNWY3MGRlNWMtNzZjNzdmOTMtNTQ5NjY5MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [3:1142:2375] 2025-04-17T17:15:05.577894Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 0.001000s actor id: [0:0:0] 2025-04-17T17:15:06.068913Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1143:2688], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:15:06.069136Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:15:06.069320Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1144:2376], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:15:06.069456Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:15:06.100421Z node 3 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(3) 2025-04-17T17:15:06.100553Z node 3 :KQP_PROXY DEBUG: Reply timeout: requestId 3 sessionId: ydb://session/3?node_id=3&id=ODg1MWQxODItNWY3MGRlNWMtNzZjNzdmOTMtNTQ5NjY5MWY= status: TIMEOUT round: 0 2025-04-17T17:15:06.100709Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(2) 2025-04-17T17:15:06.100743Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 2 sessionId: ydb://session/3?node_id=3&id=ODg1MWQxODItNWY3MGRlNWMtNzZjNzdmOTMtNTQ5NjY5MWY= status: TIMEOUT round: 0 2025-04-17T17:15:06.100881Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [2:1141:2687], selfId: [2:206:2171], source: [2:206:2171] 2025-04-17T17:15:06.101048Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ODg1MWQxODItNWY3MGRlNWMtNzZjNzdmOTMtNTQ5NjY5MWY=, ActorId: [3:1142:2375], ActorState: ExecuteState, TraceId: 01js2byezadpspp63dyr0v9k57, Create QueryResponse for error on request, msg: 2025-04-17T17:15:06.103636Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:206:2171], selfId: [3:236:2127], source: [3:1142:2375] 2025-04-17T17:15:06.103889Z node 2 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 2 2025-04-17T17:15:06.106661Z node 3 :KQP_PROXY DEBUG: Created new ... DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=YmMwODc3MDktOGIwNTIyNDMtNWM0MDY5YTAtNzM0MmFlYjM=, workerId: [3:1411:2518], database: , longSession: 1, local sessions count: 56 2025-04-17T17:15:08.885430Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:15:08.885791Z node 2 :KQP_PROXY DEBUG: Received ping session request, request_id: 57, sender: [2:1141:2687], trace_id: 2025-04-17T17:15:08.885902Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 57 timeout: 0.001000s actor id: [0:0:0] 2025-04-17T17:15:08.900425Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(57) 2025-04-17T17:15:08.900501Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 57 sessionId: ydb://session/3?node_id=3&id=YmMwODc3MDktOGIwNTIyNDMtNWM0MDY5YTAtNzM0MmFlYjM= status: TIMEOUT round: 0 2025-04-17T17:15:08.900608Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 57, sender: [2:1141:2687], selfId: [2:206:2171], source: [2:206:2171] 2025-04-17T17:15:08.902648Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=YmRmN2QwMjEtNmE5NWI3MGYtNjQ2MWY2ZTMtOWY2MTQ3Yjc=, workerId: [3:1412:2519], database: , longSession: 1, local sessions count: 57 2025-04-17T17:15:08.902815Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=YmRmN2QwMjEtNmE5NWI3MGYtNjQ2MWY2ZTMtOWY2MTQ3Yjc= 2025-04-17T17:15:08.903333Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=YmRmN2QwMjEtNmE5NWI3MGYtNjQ2MWY2ZTMtOWY2MTQ3Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. 
Send request to target, requestId: 58, targetId: [3:8678280833929343339:121] 2025-04-17T17:15:08.903390Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 58 timeout: 0.001000s actor id: [0:0:0] 2025-04-17T17:15:08.903819Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=YmRmN2QwMjEtNmE5NWI3MGYtNjQ2MWY2ZTMtOWY2MTQ3Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 87, targetId: [3:1412:2519] 2025-04-17T17:15:08.903866Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 87 timeout: 0.001000s actor id: [0:0:0] 2025-04-17T17:15:08.903968Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1413:2750], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:15:08.904081Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:15:09.006026Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1415:2520], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:15:09.006259Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:15:09.020624Z node 3 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(87) 2025-04-17T17:15:09.020710Z node 3 :KQP_PROXY DEBUG: Reply timeout: requestId 87 sessionId: ydb://session/3?node_id=3&id=YmRmN2QwMjEtNmE5NWI3MGYtNjQ2MWY2ZTMtOWY2MTQ3Yjc= status: TIMEOUT round: 0 2025-04-17T17:15:09.020815Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(58) 2025-04-17T17:15:09.020845Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 58 sessionId: ydb://session/3?node_id=3&id=YmRmN2QwMjEtNmE5NWI3MGYtNjQ2MWY2ZTMtOWY2MTQ3Yjc= status: TIMEOUT round: 0 2025-04-17T17:15:09.020998Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YmRmN2QwMjEtNmE5NWI3MGYtNjQ2MWY2ZTMtOWY2MTQ3Yjc=, ActorId: [3:1412:2519], ActorState: ExecuteState, TraceId: 01js2byj783656a109t4xqct2b, Create QueryResponse for error on request, msg: 2025-04-17T17:15:09.021153Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 58, sender: [2:1141:2687], selfId: [2:206:2171], source: [2:206:2171] 2025-04-17T17:15:09.023156Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 87, sender: [2:206:2171], selfId: [3:236:2127], source: [3:1412:2519] 2025-04-17T17:15:09.023397Z node 2 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 58 2025-04-17T17:15:09.025452Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=NjU2YjE0Y2YtNzNjZDQ3NGQtNzI1NDIyYjctZDliNzExMWM=, workerId: [3:1419:2523], database: , longSession: 1, local sessions count: 58 2025-04-17T17:15:09.025621Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:15:09.026042Z node 2 :KQP_PROXY DEBUG: Received ping session request, request_id: 59, sender: [2:1141:2687], trace_id: 2025-04-17T17:15:09.026157Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 59 timeout: 0.001000s actor id: [0:0:0] 2025-04-17T17:15:09.026318Z node 3 :KQP_PROXY DEBUG: Received ping session request, has local session: ydb://session/3?node_id=3&id=NjU2YjE0Y2YtNzNjZDQ3NGQtNzI1NDIyYjctZDliNzExMWM=, rpc ctrl: [0:0:0], sameNode: 0, trace_id: 2025-04-17T17:15:09.026436Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 59, sender: [2:1141:2687], selfId: [2:206:2171], source: [3:236:2127] 2025-04-17T17:15:09.040019Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=NDk3ZmY4MzctZjhkNDhhOTEtNTJiMDZkMzEtY2FkYzZmYzI=, workerId: [3:1420:2524], database: , longSession: 1, local sessions count: 59 2025-04-17T17:15:09.040263Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=NDk3ZmY4MzctZjhkNDhhOTEtNTJiMDZkMzEtY2FkYzZmYzI= 2025-04-17T17:15:09.040922Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=NDk3ZmY4MzctZjhkNDhhOTEtNTJiMDZkMzEtY2FkYzZmYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. 
Send request to target, requestId: 60, targetId: [3:8678280833929343339:121] 2025-04-17T17:15:09.040979Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 60 timeout: 0.001000s actor id: [0:0:0] 2025-04-17T17:15:09.041432Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=NDk3ZmY4MzctZjhkNDhhOTEtNTJiMDZkMzEtY2FkYzZmYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 90, targetId: [3:1420:2524] 2025-04-17T17:15:09.041475Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 90 timeout: 0.001000s actor id: [0:0:0] 2025-04-17T17:15:09.042869Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1421:2752], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:15:09.043074Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:15:09.133957Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1423:2525], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:15:09.134212Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:15:09.158655Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:15:09.158934Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:15:09.162357Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:15:09.162616Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:15:09.192493Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(59) 2025-04-17T17:15:09.192557Z node 2 :KQP_PROXY DEBUG: Invalid request info while on request timeout handle. RequestId: 59 2025-04-17T17:15:09.192601Z node 3 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(90) 2025-04-17T17:15:09.192642Z node 3 :KQP_PROXY DEBUG: Reply timeout: requestId 90 sessionId: ydb://session/3?node_id=3&id=NDk3ZmY4MzctZjhkNDhhOTEtNTJiMDZkMzEtY2FkYzZmYzI= status: TIMEOUT round: 0 2025-04-17T17:15:09.192732Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(60) 2025-04-17T17:15:09.192752Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 60 sessionId: ydb://session/3?node_id=3&id=NDk3ZmY4MzctZjhkNDhhOTEtNTJiMDZkMzEtY2FkYzZmYzI= status: TIMEOUT round: 0 2025-04-17T17:15:09.192877Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDk3ZmY4MzctZjhkNDhhOTEtNTJiMDZkMzEtY2FkYzZmYzI=, ActorId: [3:1420:2524], ActorState: ExecuteState, TraceId: 01js2byjbh4ktk2z2yswdmf90b, Create QueryResponse for error on request, msg: 2025-04-17T17:15:09.193002Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 60, sender: [2:1141:2687], selfId: [2:206:2171], source: [2:206:2171] 2025-04-17T17:15:09.195019Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 90, sender: [2:206:2171], selfId: [3:236:2127], source: [3:1420:2524] 2025-04-17T17:15:09.195244Z node 2 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 60 2025-04-17T17:15:09.209921Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=NzlkNDAzN2YtNDMyYTgxZTMtZjdiMzE3ZjItYTI5ZGM1NzU=, workerId: [3:1440:2528], database: , longSession: 1, local sessions count: 60 2025-04-17T17:15:09.210138Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:15:09.210589Z node 2 :KQP_PROXY DEBUG: Received ping session request, request_id: 61, sender: [2:1141:2687], trace_id: 2025-04-17T17:15:09.210717Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 61 timeout: 0.001000s actor id: [0:0:0] 2025-04-17T17:15:09.230405Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-17T17:15:09.231027Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:15:09.231477Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:15:09.244750Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(61) 2025-04-17T17:15:09.244849Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 61 sessionId: ydb://session/3?node_id=3&id=NzlkNDAzN2YtNDMyYTgxZTMtZjdiMzE3ZjItYTI5ZGM1NzU= status: TIMEOUT round: 0 2025-04-17T17:15:09.244992Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 61, sender: [2:1141:2687], selfId: [2:206:2171], source: 
[2:206:2171] >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] >> TFlatTableExecutor_CompactionScan::TestCompactionScan [GOOD] >> TFlatTableExecutor_CompressedSelectRows::TestCompressedSelectRows [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect >> TPersQueueTest::ReadFromSeveralPartitions >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue >> TPersQueueTest::DirectReadPreCached >> TopicService::OneConsumer_TheRangesDoNotOverlap >> DemoTx::Scenario_1 >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue [GOOD] >> TFlatTableExecutor_ExecutorTxLimit::TestExecutorTxLimit [GOOD] >> TFlatTableExecutor_Follower::BasicFollowerRead [GOOD] >> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles >> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot >> TPersQueueTest::SchemeshardRestart >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachAfterLoan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::OAuthToken [GOOD] Test command err: 2025-04-17T17:14:59.905419Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331812098736274:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:59.905462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001eee/r3tmp/tmpVVZuvW/pdisk_1.dat 2025-04-17T17:15:00.714704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:15:00.714792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:15:00.719242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:15:00.800482Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8523 TServer::EnableGrpc on GrpcPort 7224, node 1 2025-04-17T17:15:01.453410Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:15:01.453437Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:15:01.453443Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:15:01.453564Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8523 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:15:02.436797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:15:02.464149Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:15:02.555102Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T17:15:06.208511Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494331841279649051:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:15:06.242028Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001eee/r3tmp/tmpIY1bDi/pdisk_1.dat 2025-04-17T17:15:06.490778Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:15:06.533232Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:15:06.533323Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:15:06.536462Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14755 TServer::EnableGrpc on GrpcPort 16022, node 2 2025-04-17T17:15:07.116859Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:15:07.116881Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:15:07.116888Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:15:07.116990Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14755 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:15:07.780960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:15:07.792853Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:15:07.837067Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] Test command err: 2025-04-17T17:15:11.928467Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:15:11.930452Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [1:3:2050] 2025-04-17T17:15:11.930555Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:6:2053] 2025-04-17T17:15:11.930596Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:9:2056] 2025-04-17T17:15:11.930656Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [1:36:2066] 2025-04-17T17:15:11.930706Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:37:2066] 2025-04-17T17:15:11.930757Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Set up state: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:11.930909Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:38:2066] 2025-04-17T17:15:11.930988Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: 
[OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:11.931131Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 1 2025-04-17T17:15:11.931237Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2066], cookie# 1 2025-04-17T17:15:11.931307Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2066], cookie# 1 2025-04-17T17:15:11.931365Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 1 2025-04-17T17:15:11.931422Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:3:2050], cookie# 1 2025-04-17T17:15:11.931460Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:6:2053], cookie# 1 2025-04-17T17:15:11.931490Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:9:2056], cookie# 1 2025-04-17T17:15:11.931554Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:36:2066], cookie# 1 2025-04-17T17:15:11.931593Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, failures# 0 2025-04-17T17:15:11.931628Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:37:2066], cookie# 1 2025-04-17T17:15:11.931680Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-04-17T17:15:11.931721Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:38:2066], cookie# 1 2025-04-17T17:15:11.931744Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Unexpected sync response: sender# [1:38:2066], cookie# 1 >> TFlatTableExecutor_Follower::FollowerAttachAfterLoan [GOOD] >> TFlatTableExecutor_Gc::TestFailedGcAfterReboot >> TPartitionWriterCacheActorTests::WriteReplyOrder |76.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |76.8%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |76.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut >> TFlatTableExecutor_Gc::TestFailedGcAfterReboot [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex >> TPersQueueTest::WriteExisting >> Cdc::NaN[TopicRunner] [GOOD] >> Cdc::RacyRebootAndSplitWithTxInflight >> TPersQueueTest::BadTopic ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 24815, MsgBus: 6552 2025-04-17T17:14:45.685415Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331749811211313:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:45.685870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0011c6/r3tmp/tmpd6lZGu/pdisk_1.dat 2025-04-17T17:14:46.244676Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:46.281592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:46.281707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:46.283018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24815, node 1 2025-04-17T17:14:46.412282Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:46.412304Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:46.412311Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:46.412435Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6552 TClient is connected to server localhost:6552 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:47.132518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:47.165380Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:14:47.178349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:47.314413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
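A side note on the operation ids in these records: every txId in this run sits just above 2^48 = 281474976710656, so the first schemeshard operation above is 281474976710657 = 2^48 + 1, and the node-2 run further below continues at 281474976715657 = 2^48 + 5001. A small standalone C++ check of that arithmetic (the offset-above-2^48 layout is an observation about this particular log, not a documented allocator contract):

    #include <cstdint>

    // txIds seen in this log are small offsets above 2^48.
    constexpr std::uint64_t kTxIdBase = 1ULL << 48;
    static_assert(kTxIdBase == 281474976710656ULL, "2^48");
    static_assert(kTxIdBase + 1 == 281474976710657ULL, "first txId of the node-1 run");
    static_assert(kTxIdBase + 5001 == 281474976715657ULL, "first txId of the node-2 run");

    int main() { return 0; }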
2025-04-17T17:14:47.475790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:47.559526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:49.293907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331766991082158:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:49.294029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:49.577946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:49.614031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:49.688201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:49.739401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:49.831440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:49.892779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:49.975614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:50.053334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331771286050123:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:50.053413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:50.053631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331771286050128:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:50.058122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:50.073075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331771286050130:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:14:50.164990Z node 1 :TX_PROXY ERROR: Actor# [1:7494331771286050186:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:50.660627Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331749811211313:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:50.660684Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18821, MsgBus: 27965 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0011c6/r3tmp/tmpcwctgN/pdisk_1.dat 2025-04-17T17:14:54.488328Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:14:54.611254Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:54.638895Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:54.638985Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:54.645334Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18821, node 2 2025-04-17T17:14:54.754789Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:54.754814Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:54.754821Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:54.754940Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27965 TClient is connected to server localhost:27965 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
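The pool-creation records above show an idempotent create pattern: TPoolCreatorActor issues the create, receives "Transaction ... completed, doublechecking", retries, and the follow-up attempt's "path exist, request accepts it" is treated as success rather than an error. A minimal sketch of that retry loop (hypothetical helper names, not the actual workload-service code; only the control flow mirrors the log):

    #include <functional>
    #include <string>

    enum class EStatus { Ok, AlreadyExists, RetryDoublecheck, Error };

    // createPool/poolExists stand in for the real scheme-service calls.
    EStatus CreatePoolIdempotent(const std::function<EStatus(const std::string&)>& createPool,
                                 const std::function<bool(const std::string&)>& poolExists,
                                 const std::string& path, int maxRetries = 3) {
        for (int attempt = 0; attempt < maxRetries; ++attempt) {
            switch (createPool(path)) {
                case EStatus::Ok:
                case EStatus::AlreadyExists:      // "path exist, request accepts it"
                    return EStatus::Ok;
                case EStatus::RetryDoublecheck:   // "completed, doublechecking": re-check, else retry
                    if (poolExists(path))
                        return EStatus::Ok;
                    break;
                case EStatus::Error:
                    return EStatus::Error;
            }
        }
        return EStatus::Error;
    }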
2025-04-17T17:14:55.211194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:14:55.219976Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:55.233692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:14:55.337103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:55.514628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:55.627535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:57.744238Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494331804722041373:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:57.744334Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:57.800422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:57.877099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:57.948352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:58.029346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:58.126559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:58.229932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:58.376908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:58.540901Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494331809017009349:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:58.540995Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:58.543317Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494331809017009354:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:58.547956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:58.596332Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494331809017009356:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:14:58.694133Z node 2 :TX_PROXY ERROR: Actor# [2:7494331809017009415:3559] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:15:09.558517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:15:09.558557Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:15:11.349159Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494331864851585187:2641], TxId: 281474976715681, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=OWEwMTA0My03MGJlMDZiZC1jNDlmOTBjZC1lYzIyODdhYQ==. CustomerSuppliedId : . TraceId : 01js2bymd483t7s7bzfjs0yw43. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1744910100094/18446744073709551615 shard 72075186224037888 with lowWatermark v1744910100563/18446744073709551615 (node# 2 state# Ready) } } 2025-04-17T17:15:11.349699Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494331864851585187:2641], TxId: 281474976715681, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=OWEwMTA0My03MGJlMDZiZC1jNDlmOTBjZC1lYzIyODdhYQ==. CustomerSuppliedId : . TraceId : 01js2bymd483t7s7bzfjs0yw43. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1744910100094/18446744073709551615 shard 72075186224037888 with lowWatermark v1744910100563/18446744073709551615 (node# 2 state# Ready) } }. 2025-04-17T17:15:11.350288Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494331864851585188:2642], TxId: 281474976715681, task: 2. Ctx: { TraceId : 01js2bymd483t7s7bzfjs0yw43. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OWEwMTA0My03MGJlMDZiZC1jNDlmOTBjZC1lYzIyODdhYQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7494331864851585183:2514], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T17:15:11.350704Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWEwMTA0My03MGJlMDZiZC1jNDlmOTBjZC1lYzIyODdhYQ==, ActorId: [2:7494331817606944333:2514], ActorState: ExecuteState, TraceId: 01js2bymd483t7s7bzfjs0yw43, Create QueryResponse for error on request, msg: >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace [GOOD] >> Cdc::AreJsonsEqualReturnsTrueOnEqual [GOOD] >> Cdc::AreJsonsEqualReturnsFalseOnDifferent [GOOD] >> Cdc::AreJsonsEqualFailsOnWildcardInArray [GOOD] >> Cdc::AlterViaTopicService >> TPartitionWriterCacheActorTests::WriteReplyOrder [GOOD] >> TPartitionWriterCacheActorTests::DropOldWriter >> TPersQueueTest::SetupLockSession2 >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex >> TPersQueueMirrorer::ValidStartStream [GOOD] >> TSubscriberTest::Sync >> TPartitionWriterCacheActorTests::DropOldWriter [GOOD] >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex >> TPersQueueTest::UpdatePartitionLocation >> TSubscriberTest::Sync [GOOD] >> TSubscriberTest::ReconnectOnFailure >> TSubscriberTest::StrongNotificationAfterCommit >> TSubscriberTest::ReconnectOnFailure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Sync [GOOD] Test command err: 2025-04-17T17:15:14.297904Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:15:14.299976Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-04-17T17:15:14.300091Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2025-04-17T17:15:14.300239Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2025-04-17T17:15:14.300326Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:36:2066] 2025-04-17T17:15:14.300383Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:37:2066] 2025-04-17T17:15:14.300436Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Set up state: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:14.300571Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:38:2066] 2025-04-17T17:15:14.300642Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 
elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:14.300767Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 1 2025-04-17T17:15:14.300890Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2066], cookie# 1 2025-04-17T17:15:14.300995Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2066], cookie# 1 2025-04-17T17:15:14.301038Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 1 2025-04-17T17:15:14.301097Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:3:2050], cookie# 1 2025-04-17T17:15:14.301138Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:6:2053], cookie# 1 2025-04-17T17:15:14.301166Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:9:2056], cookie# 1 2025-04-17T17:15:14.301233Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:36:2066], cookie# 1 2025-04-17T17:15:14.301278Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, failures# 0 2025-04-17T17:15:14.301332Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:37:2066], cookie# 1 2025-04-17T17:15:14.301368Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-04-17T17:15:14.301418Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:38:2066], cookie# 1 2025-04-17T17:15:14.301448Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Unexpected sync response: sender# [1:38:2066], cookie# 1 >> TSubscriberTest::InvalidNotification ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::ReconnectOnFailure [GOOD] Test command err: 2025-04-17T17:15:14.865878Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:15:14.869857Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-17T17:15:14.869980Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-17T17:15:14.870128Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2065] 
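The "Sync is in progress" / "Sync is done" records above show the subscriber's majority accounting: with size# 3 replicas, half# is 1 and the sync completes as soon as successes# exceeds it (2 of 3 here); the third reply then arrives after completion and is logged as an "Unexpected sync response" and dropped. A minimal sketch of that counting (illustrative names, not the actual TSubscriber implementation):

    #include <cstdint>

    struct TSyncQuorum {
        std::uint32_t Size = 3;        // size#  - number of replica proxies
        std::uint32_t Successes = 0;   // successes#
        std::uint32_t Failures = 0;    // failures#
        bool Done = false;

        std::uint32_t Half() const { return Size / 2; }  // half# in the log

        // Returns true when this reply completes the sync; replies arriving after
        // completion correspond to the "Unexpected sync response" records.
        bool HandleReply(bool success) {
            if (Done)
                return false;          // late reply, ignored
            (success ? Successes : Failures) += 1;
            Done = Successes > Half() || Failures > Half();
            return Done;
        }
    };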
2025-04-17T17:15:14.870259Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:36:2065] 2025-04-17T17:15:14.870300Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:34:2065][path] Set up state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:14.870359Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-17T17:15:14.870453Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2065] 2025-04-17T17:15:14.870504Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:14.870995Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2065] 2025-04-17T17:15:14.871056Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:14.871095Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:36:2065] 2025-04-17T17:15:14.871123Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:14.871165Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2065] 2025-04-17T17:15:14.871213Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:14.884393Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:45:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-17T17:15:14.884486Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2065] 2025-04-17T17:15:14.884547Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:14.884666Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:46:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-17T17:15:14.884734Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify 
{ Path: path Version: 0 }: sender# [2:36:2065] 2025-04-17T17:15:14.884772Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:14.884854Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:47:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-17T17:15:14.884924Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2065] 2025-04-17T17:15:14.884951Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:14.885525Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:45:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-04-17T17:15:14.885605Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [2:35:2065] 2025-04-17T17:15:14.885652Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:34:2065][path] Update to strong state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } >> TSubscriberTest::InvalidNotification [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] Test command err: 2025-04-17T17:15:15.116962Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:15:15.119139Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-17T17:15:15.119253Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-17T17:15:15.119297Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-17T17:15:15.119358Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-17T17:15:15.119436Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-17T17:15:15.119496Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:15.119540Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-04-17T17:15:15.119573Z node 1 
:SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:15.119931Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-17T17:15:15.120005Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-17T17:15:15.120062Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Update to strong state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:15.120655Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-17T17:15:15.120735Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-17T17:15:15.120793Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } >> YdbProxy::AlterTopic [GOOD] >> DataShardReadTableSnapshots::ReadTableDropColumn [GOOD] >> DataShardReadTableSnapshots::CorruptedDyNumber ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::InvalidNotification [GOOD] Test command err: 2025-04-17T17:15:15.633027Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:15:15.635055Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-17T17:15:15.635172Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-17T17:15:15.635217Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-17T17:15:15.635281Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-17T17:15:15.635359Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-17T17:15:15.635420Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:15.635464Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-04-17T17:15:15.637283Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: 
AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:15.637473Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [1:33:2064] 2025-04-17T17:15:15.637514Z node 1 :SCHEME_BOARD_SUBSCRIBER ERROR: [main][1:34:2065][path] Suspicious NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [1:33:2064] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TPersQueueMirrorer::ValidStartStream [GOOD] Test command err: 2025-04-17T17:12:11.705123Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331091209344345:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:11.705162Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:12:14.651069Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001739/r3tmp/tmpo1wRcS/pdisk_1.dat 2025-04-17T17:12:16.710089Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331091209344345:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:16.710155Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:12:18.998251Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:18.998562Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:20.156424Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:20.156451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:21.915130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:21.915155Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:22.602267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:12:22.603004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:12:22.658710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:12:22.736633Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on 
GrpcPort 10892, node 1 2025-04-17T17:12:24.535920Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6vg8/001739/r3tmp/yandexYzrDyn.tmp 2025-04-17T17:12:24.558148Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6vg8/001739/r3tmp/yandexYzrDyn.tmp 2025-04-17T17:12:24.560277Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6vg8/001739/r3tmp/yandexYzrDyn.tmp 2025-04-17T17:12:24.560403Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:12:27.248027Z INFO: TTestServer started on Port 5367 GrpcPort 10892 TClient is connected to server localhost:5367 PQClient connected to localhost:10892 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-04-17T17:12:35.686356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:12:35.686380Z node 1 :IMPORT WARN: Table profiles were not loaded Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:12:35.839881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:12:36.139326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T17:12:46.681134Z node 1 :KQP_PROXY ERROR: TraceId: "01js2bsz3s6dpzegxysdc8e5rr", Request deadline has expired for 3.595000s seconds 2025-04-17T17:12:46.803057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331241533200396:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:46.803398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:46.808369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331241533200409:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:46.813240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:12:46.840167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331241533200411:2375], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:12:47.133296Z node 1 :TX_PROXY ERROR: Actor# [1:7494331241533200475:2515] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:12:47.166737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:12:47.207831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:12:47.317417Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494331245828167782:2382], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:12:47.319310Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDI0M2RjYTEtNjJlZmJhYjUtOWE2NzBlNTEtOGE2MTFjOTQ=, ActorId: [1:7494331241533200394:2369], ActorState: ExecuteState, TraceId: 01js2bt7akd0hmkaj051gycbsk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:12:47.325184Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:12:47.468844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subscribe to ClusterTracker from [1:7494331245828168080:2702] === CheckClustersList. Ok 2025-04-17T17:12:52.837170Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-17T17:12:52.866099Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-17T17:12:52.867309Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494331267303004776:2813], Recipient [1:7494331121274115539:2146]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:52.867332Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:52.867344Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:12:52.867378Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7494331267303004772:2810], Recipient [1:7494331121274115539:2146]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480} 2025-04-17T17:12:52.867394Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T17:12:52.996263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 10 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { 
PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710672 TabletId: 7 ... ssion cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 10 WriteTimestampMS: 1744910110213 CreateTimestampMS: 1744910110187 SizeLag: 1456 WriteTimestampEstimateMS: 1744910110560 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-04-17T17:15:10.616403Z node 8 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 readOffset 0 committedOffset 0 2025-04-17T17:15:10.616501Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 sending to client partition status 2025-04-17T17:15:10.628265Z :INFO: [] [] [16883016-6c034e1f-1d61c411-acd667cd] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. Read offset: 5 2025-04-17T17:15:10.632388Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 read_offset: 5 } } 2025-04-17T17:15:10.632614Z node 8 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 5, commitOffset# (empty maybe) 2025-04-17T17:15:10.632680Z node 8 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 5 2025-04-17T17:15:10.632708Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 5 endOffset 10 2025-04-17T17:15:10.632772Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 5, endOffset# 10, WTime# 1744910110213, sizeLag# 1456 2025-04-17T17:15:10.632790Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1TEvPartitionReady. 
Aval parts: 1 2025-04-17T17:15:10.632856Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 performing read request: guid# bdec19f7-b5725903-965b329a-16098b61, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 6, size# 1747, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-17T17:15:10.632930Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 6 maxSize 1747 maxTimeLagMs 0 readTimestampMs 0 readOffset 5 EndOffset 10 ClientCommitOffset 0 committedOffset 0 Guid bdec19f7-b5725903-965b329a-16098b61 2025-04-17T17:15:10.634404Z node 9 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-17T17:15:10.634447Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2025-04-17T17:15:10.634567Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 Topic 'rt3.dc1--topic1' partition 0 user user offset 5 count 6 size 1747 endOffset 10 max time lag 0ms effective offset 5 2025-04-17T17:15:10.634597Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 5, current partition end offset: 10 2025-04-17T17:15:10.634760Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2025-04-17T17:15:10.634784Z node 9 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-17T17:15:10.634932Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 5 2025-04-17T17:15:10.640203Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 10 Result { Offset: 5 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 6 WriteTimestampMS: 1744910110338 CreateTimestampMS: 1744910110335 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 6 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 7 WriteTimestampMS: 1744910110348 CreateTimestampMS: 1744910110335 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 7 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 8 WriteTimestampMS: 1744910110368 CreateTimestampMS: 1744910110335 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 8 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 9 WriteTimestampMS: 1744910110420 CreateTimestampMS: 1744910110335 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 9 Data: "... 94 bytes ..." 
SourceId: "\000src-id-test" SeqNo: 10 WriteTimestampMS: 1744910110428 CreateTimestampMS: 1744910110335 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 81 RealReadOffset: 9 WaitQuotaTimeMs: 0 EndOffset: 10 StartOffset: 0 } Cookie: 5 } 2025-04-17T17:15:10.640477Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset10 2025-04-17T17:15:10.640519Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 ReadOffset 10 ReadGuid bdec19f7-b5725903-965b329a-16098b61 has messages 1 2025-04-17T17:15:10.640672Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 read done: guid# bdec19f7-b5725903-965b329a-16098b61, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 927 2025-04-17T17:15:10.640701Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 response to read: guid# bdec19f7-b5725903-965b329a-16098b61 2025-04-17T17:15:10.640997Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 Process answer. Aval parts: 0 2025-04-17T17:15:10.641841Z :DEBUG: [] [] [16883016-6c034e1f-1d61c411-acd667cd] [] Got ReadResponse, serverBytesSize = 927, now ReadSizeBudget = 0, ReadSizeServerDelta = 52427873 2025-04-17T17:15:10.642019Z :DEBUG: [] [] [16883016-6c034e1f-1d61c411-acd667cd] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52427873 2025-04-17T17:15:10.644678Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (5-9) 2025-04-17T17:15:10.644740Z :DEBUG: [] [] [16883016-6c034e1f-1d61c411-acd667cd] [] Returning serverBytesSize = 927 to budget 2025-04-17T17:15:10.644780Z :DEBUG: [] [] [16883016-6c034e1f-1d61c411-acd667cd] [] In ContinueReadingDataImpl, ReadSizeBudget = 927, ReadSizeServerDelta = 52427873 2025-04-17T17:15:10.645080Z :DEBUG: [] [] [16883016-6c034e1f-1d61c411-acd667cd] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-17T17:15:10.648363Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (5-5) 2025-04-17T17:15:10.648446Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (6-6) 2025-04-17T17:15:10.648484Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (7-7) 2025-04-17T17:15:10.648516Z :DEBUG: [] Take Data. Partition 0. Read: {3, 0} (8-8) 2025-04-17T17:15:10.648595Z :DEBUG: [] Take Data. Partition 0. Read: {4, 0} (9-9) 2025-04-17T17:15:10.648653Z :DEBUG: [] [] [16883016-6c034e1f-1d61c411-acd667cd] [] The application data is transferred to the client. 
Number of messages 5, size 115 bytes 2025-04-17T17:15:10.648707Z :DEBUG: [] [] [16883016-6c034e1f-1d61c411-acd667cd] [] Returning serverBytesSize = 0 to budget 2025-04-17T17:15:10.648605Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 grpc read done: success# 1, data# { read_request { bytes_size: 927 } } 2025-04-17T17:15:10.648731Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 got read request: guid# 60ac061e-912db4fe-f33666ad-7dc2bed8 2025-04-17T17:15:10.648907Z :INFO: [] [] [16883016-6c034e1f-1d61c411-acd667cd] Closing read session. Close timeout: 0.000000s 2025-04-17T17:15:10.648958Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:9:0 2025-04-17T17:15:10.649009Z :INFO: [] [] [16883016-6c034e1f-1d61c411-acd667cd] Counters: { Errors: 0 CurrentSessionLifetimeMs: 110 BytesRead: 115 MessagesRead: 5 BytesReadCompressed: 115 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T17:15:10.649117Z :NOTICE: [] [] [16883016-6c034e1f-1d61c411-acd667cd] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-17T17:15:10.649198Z :DEBUG: [] [] [16883016-6c034e1f-1d61c411-acd667cd] [] Abort session to cluster 2025-04-17T17:15:10.649726Z :NOTICE: [] [] [16883016-6c034e1f-1d61c411-acd667cd] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T17:15:10.656930Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 grpc read done: success# 0, data# { } 2025-04-17T17:15:10.656959Z node 8 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 grpc read failed 2025-04-17T17:15:10.656991Z node 8 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 grpc closed 2025-04-17T17:15:10.657028Z node 8 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_8_2_10244776478069112755_v1 is DEAD 2025-04-17T17:15:10.658244Z node 8 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [8:7494331857008135692:2540] disconnected; active server actors: 1 2025-04-17T17:15:10.658272Z node 8 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic1] pipe [8:7494331857008135692:2540] client user disconnected session shared/user_8_2_10244776478069112755_v1 2025-04-17T17:15:10.663035Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_8_2_10244776478069112755_v1 2025-04-17T17:15:10.663105Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [8:7494331857008135695:2543] destroyed 2025-04-17T17:15:10.663227Z node 9 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_8_2_10244776478069112755_v1 2025-04-17T17:15:10.668294Z :DEBUG: [] MessageGroupId [src-id-test] SessionId [src-id-test|ff8b24a8-82d06a5-d976641a-6252a3c6_0] Write session: destroy >> TCutHistoryRestrictions::BasicTest [GOOD] >> TCutHistoryRestrictions::EmptyAllowList [GOOD] >> TCutHistoryRestrictions::BothListsEmpty [GOOD] >> ObjectDistribution::TestImbalanceCalcualtion >> ObjectDistribution::TestImbalanceCalcualtion [GOOD] >> ObjectDistribution::TestAllowedDomainsAndDown >> ObjectDistribution::TestAllowedDomainsAndDown [GOOD] >> ObjectDistribution::TestAddSameNode [GOOD] >> ObjectDistribution::TestManyIrrelevantNodes >> TSubscriberTest::NotifyUpdate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTopic [GOOD] Test command err: 2025-04-17T17:14:57.748880Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331803474086271:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:57.815223Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001f0b/r3tmp/tmpTGcJ3N/pdisk_1.dat 2025-04-17T17:14:58.592773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:58.592891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:58.595760Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:58.618710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12447 TServer::EnableGrpc on GrpcPort 62255, node 1 2025-04-17T17:14:59.268640Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:59.268660Z node 1 :NET_CLASSIFIER WARN: will try to 
initialize from file: (empty maybe) 2025-04-17T17:14:59.268665Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:59.268756Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12447 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:59.976486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:15:00.010297Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:15:02.752239Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331803474086271:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:15:02.752306Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:15:03.382799Z node 1 :TX_PROXY ERROR: Actor# [1:7494331829243890554:2311] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-17T17:15:03.399904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:15:05.024759Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494331833297244808:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:15:05.120875Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001f0b/r3tmp/tmpW78bpl/pdisk_1.dat 2025-04-17T17:15:05.334193Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:15:05.369514Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:15:05.369595Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:15:05.372801Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10206 TServer::EnableGrpc on GrpcPort 12333, node 2 2025-04-17T17:15:06.188709Z node 2 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:15:06.188741Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:15:06.188747Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:15:06.188852Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10206 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:15:06.730150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:15:06.751249Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:15:09.988323Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494331833297244808:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:15:09.988392Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:15:10.706438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:15:10.784139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T17:15:12.022727Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494331867574043107:2127];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:15:12.022801Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/001f0b/r3tmp/tmp1s4x7Y/pdisk_1.dat 2025-04-17T17:15:12.217657Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:15:12.235398Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:15:12.235464Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:15:12.236766Z node 3 :HIVE WARN: 
HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10525 TServer::EnableGrpc on GrpcPort 64044, node 3 2025-04-17T17:15:12.550103Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:15:12.550133Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:15:12.550143Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:15:12.550297Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10525 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:15:12.943762Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
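The AlterTopic rejection recorded just below comes from schemeshard's retention-period bounds check: the test asks for 31536000s (one year) while the configured maximum is 2678400s (31 days). A minimal sketch of how such a request could be driven through the public YDB C++ topic SDK; the include path and the fluent setter name (`SetRetentionPeriod` on `TAlterTopicSettings`) are assumptions against the public SDK and should be verified for your SDK version:

```cpp
// Sketch only: include path and setter names follow the public YDB C++ SDK
// (ydb_topic) and are assumptions to check against your SDK version.
#include <ydb/public/sdk/cpp/client/ydb_topic/topic.h>
#include <util/datetime/base.h>

// Ask schemeshard to raise a topic's retention period above its configured
// maximum (2678400s = 31 days); the request is expected to fail the bounds
// check with "Invalid retention period", as in the log record below.
inline void AlterWithExcessiveRetention(NYdb::NTopic::TTopicClient& client) {
    NYdb::NTopic::TAlterTopicSettings settings;
    settings.SetRetentionPeriod(TDuration::Seconds(31536000)); // one year

    NYdb::TStatus status = client.AlterTopic("/Root/topic", settings).GetValueSync();
    Y_ENSURE(!status.IsSuccess()); // expect rejection, not success
}
```

The min/max bounds themselves come from schemeshard configuration, so the exact limits quoted in the error message may differ between installations; the test here only asserts that the out-of-range value is refused.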
2025-04-17T17:15:12.955335Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:15:13.196030Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T17:15:13.238283Z node 3 :TX_PROXY ERROR: Actor# [3:7494331871869011132:2397] txid# 281474976715660, issues: { message: "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s" severity: 1 } >> DataShardReadTableSnapshots::ReadTableSplitBefore [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitFinished >> TSubscriberCombinationsTest::MigratedPathRecreation >> TSubscriberTest::SyncPartial >> TSubscriberTest::NotifyUpdate [GOOD] >> DataShardReadTableSnapshots::ReadTableSnapshot [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitAfter >> TopicAutoscaling::ReadFromTimestamp_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_OffsetCommit [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder [GOOD] >> DataShardReadTableSnapshots::ReadTableUUID >> TConsoleTests::TestAuthorizationExtSubdomain [GOOD] >> TConsoleTests::TestAttributesExtSubdomain >> HttpRequest::ProbeServerless [GOOD] >> TSubscriberCombinationsTest::MigratedPathRecreation [GOOD] >> TSubscriberTest::Boot >> TSubscriberTest::SyncPartial [GOOD] >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose [GOOD] >> DataShardReadTableSnapshots::ReadTableMaxRows ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::NotifyUpdate [GOOD] Test command err: 2025-04-17T17:15:17.424890Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:15:17.427440Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-17T17:15:17.427557Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-17T17:15:17.427603Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-17T17:15:17.427680Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-17T17:15:17.427752Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-17T17:15:17.427799Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:17.427841Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-04-17T17:15:17.427874Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:17.428341Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-04-17T17:15:17.428525Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:35:2065] 2025-04-17T17:15:17.428586Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Update to strong state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } >> TSubscriberCombinationsTest::CombinationsRootDomain >> TSubscriberTest::Boot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncPartial [GOOD] Test command err: 2025-04-17T17:15:17.798888Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:15:17.801570Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-17T17:15:17.801728Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-17T17:15:17.801771Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-17T17:15:17.801838Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-17T17:15:17.801922Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-17T17:15:17.801973Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:17.802020Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-04-17T17:15:17.802056Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:17.802296Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 1 2025-04-17T17:15:17.802443Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2065], cookie# 1 2025-04-17T17:15:17.802509Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2065], cookie# 1 2025-04-17T17:15:17.802576Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2065], cookie# 1 2025-04-17T17:15:17.802637Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:6:2053], cookie# 1 2025-04-17T17:15:17.802685Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:9:2056], cookie# 1 2025-04-17T17:15:17.802743Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2065], cookie# 1 2025-04-17T17:15:17.802783Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 0, faulires# 1 2025-04-17T17:15:17.802823Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-17T17:15:17.802868Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:17.802906Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:36:2065], cookie# 1 2025-04-17T17:15:17.802939Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 1 2025-04-17T17:15:17.802973Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:37:2065], cookie# 1 2025-04-17T17:15:17.803006Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 1, partial# 0 2025-04-17T17:15:17.803122Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 2 2025-04-17T17:15:17.803212Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2065], cookie# 2 2025-04-17T17:15:17.803234Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 2, size# 3, half# 1, successes# 0, faulires# 1 2025-04-17T17:15:17.803268Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2065], cookie# 2 2025-04-17T17:15:17.803315Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2065], cookie# 2 2025-04-17T17:15:17.803381Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:9:2056], cookie# 2 2025-04-17T17:15:17.803424Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:36:2065], cookie# 2 2025-04-17T17:15:17.803459Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: [main][1:34:2065][path] Sync is done: cookie# 2, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-17T17:15:17.803506Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle 
NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-17T17:15:17.803540Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:17.803577Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:37:2065], cookie# 2 2025-04-17T17:15:17.803609Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Unexpected sync response: sender# [1:37:2065], cookie# 2 2025-04-17T17:15:17.803739Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 3 2025-04-17T17:15:17.803818Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2065], cookie# 3 2025-04-17T17:15:17.803840Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 3, size# 3, half# 1, successes# 0, faulires# 1 2025-04-17T17:15:17.803864Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:36:2065], cookie# 3 2025-04-17T17:15:17.803888Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: [main][1:34:2065][path] Sync is done: cookie# 3, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-17T17:15:17.803943Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2065], cookie# 3 2025-04-17T17:15:17.804009Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:37:2065], cookie# 3 2025-04-17T17:15:17.804034Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Unexpected sync response: sender# [1:37:2065], cookie# 3 2025-04-17T17:15:17.804078Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-04-17T17:15:17.804497Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } >> TestKinesisHttpProxy::TestRequestWithIAM [GOOD] |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Boot [GOOD] Test command err: 2025-04-17T17:15:17.743788Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-04-17T17:15:17.743862Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Successful handshake: owner# 800, generation# 1 2025-04-17T17:15:17.744059Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# 
[1:34:2065] 2025-04-17T17:15:17.744093Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Commit generation: owner# 800, generation# 1 2025-04-17T17:15:17.752506Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:35:2066] 2025-04-17T17:15:17.752582Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 900, generation# 1 2025-04-17T17:15:17.752946Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:35:2066] 2025-04-17T17:15:17.752989Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 900, generation# 1 2025-04-17T17:15:17.753119Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:15:17.756452Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:41:2068] 2025-04-17T17:15:17.756508Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/db/dir_inside 2025-04-17T17:15:17.756647Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Subscribe: subscriber# [1:41:2068], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-17T17:15:17.756858Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:42:2068] 2025-04-17T17:15:17.756883Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/db/dir_inside 2025-04-17T17:15:17.756919Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:42:2068], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-17T17:15:17.757029Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:43:2068] 2025-04-17T17:15:17.757051Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Upsert description: path# /root/db/dir_inside 2025-04-17T17:15:17.757094Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Subscribe: subscriber# [1:43:2068], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-17T17:15:17.757182Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:3:2050] 2025-04-17T17:15:17.757252Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:41:2068] 2025-04-17T17:15:17.757314Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:42:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:6:2053] 2025-04-17T17:15:17.757353Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:42:2068] 2025-04-17T17:15:17.757408Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:43:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:9:2056] 2025-04-17T17:15:17.757446Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:43:2068] 2025-04-17T17:15:17.757540Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle 
NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:38:2068] 2025-04-17T17:15:17.757635Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:39:2068] 2025-04-17T17:15:17.757672Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/db/dir_inside] Set up state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:17.757728Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:40:2068] 2025-04-17T17:15:17.757765Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:37:2068][/root/db/dir_inside] Ignore empty state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2025-04-17T17:15:17.757991Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:34:2065], cookie# 0, event size# 118 2025-04-17T17:15:17.758033Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false 2025-04-17T17:15:17.765163Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-04-17T17:15:17.765425Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 800, LocalPathId: 1111] Version: 1 }: sender# [1:3:2050] 2025-04-17T17:15:17.765494Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:41:2068] 2025-04-17T17:15:17.765564Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 800, LocalPathId: 1111] Version: 1 }: sender# [1:38:2068] 2025-04-17T17:15:17.765642Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/db/dir_inside] Update to strong state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 1111], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() < argsRight.GetSuperId() =========== !argsRight.IsDeletion 2025-04-17T17:15:17.765918Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [1:35:2066], cookie# 0, event size# 117 2025-04-17T17:15:17.765961Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false 2025-04-17T17:15:17.766076Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# 
/root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-04-17T17:15:17.766203Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:42:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 900, LocalPathId: 11] Version: 1 }: sender# [1:6:2053] 2025-04-17T17:15:17.766258Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:42:2068] 2025-04-17T17:15:17.766324Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 900, LocalPathId: 11] Version: 1 }: sender# [1:39:2068] 2025-04-17T17:15:17.766393Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/db/dir_inside] Path was updated to new version: owner# [1:36:2067], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 1111], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 900, LocalPathId: 11], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:18.233200Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:15:18.233857Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2025-04-17T17:15:18.233922Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2025-04-17T17:15:18.233955Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2025-04-17T17:15:18.234005Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:35:2065] 2025-04-17T17:15:18.234071Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:36:2065] 2025-04-17T17:15:18.234331Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:34:2065][path] Set up state: owner# [3:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:15:18.234400Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:37:2065] 2025-04-17T17:15:18.234465Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:34:2065][path] Ignore empty state: owner# [3:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } >> Cdc::AlterViaTopicService [GOOD] >> Cdc::Alter >> TestKinesisHttpProxy::TestRequestNoAuthorization ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReadFromTimestamp_PQv1 [GOOD] Test command err: 2025-04-17T17:12:21.419398Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331134536252947:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:21.441296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:12:25.216795Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0016f3/r3tmp/tmp8yn4hK/pdisk_1.dat 2025-04-17T17:12:26.584011Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331134536252947:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:26.588234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:12:30.224562Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331151716122396:2291];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:30.224601Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:12:32.027662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:32.027683Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:33.036361Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:33.204708Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:33.288728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:34.236443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:35.017954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:35.048337Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:35.702554Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:35.725492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:35.726676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-04-17T17:12:35.726771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:12:35.755484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6375, node 1 2025-04-17T17:12:37.226254Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6vg8/0016f3/r3tmp/yandexgpRUgo.tmp 2025-04-17T17:12:37.226274Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6vg8/0016f3/r3tmp/yandexgpRUgo.tmp 2025-04-17T17:12:37.227663Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6vg8/0016f3/r3tmp/yandexgpRUgo.tmp 2025-04-17T17:12:37.253965Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:12:39.407582Z INFO: TTestServer started on Port 19723 GrpcPort 6375 TClient is connected to server localhost:19723 PQClient connected to localhost:6375 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:12:45.945536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:12:46.343397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T17:12:47.028612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:12:47.028635Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:49.271801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331254795338144:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:49.271906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:49.271982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331254795338165:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:49.275597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:12:49.287681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331254795338167:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:12:49.533691Z node 1 :TX_PROXY ERROR: Actor# [1:7494331254795338231:2508] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:12:49.553992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:12:49.585475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:12:49.663959Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494331254795338239:2387], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:12:49.665738Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWViMTUzZmQtYWNjNmE0OTktNDBlYWMyMTItNTZjZTIzMDQ=, ActorId: [1:7494331254795338140:2373], ActorState: ExecuteState, TraceId: 01js2bt9v74mv521qv3mjt65e7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:12:49.667717Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:12:49.726654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7494331259090305833:2692] === CheckClustersList. Ok 2025-04-17T17:12:55.233135Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-17T17:12:55.404952Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-17T17:12:55.408430Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494331280565142555:2822], Recipient [1:7494331168895991656:2141]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:55.408462Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:55.410239Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:12:55.410271Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7494331280565142551:2819], Recipient [1:7494331168895991656:2141]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480} 2025-04-17T17:12:55.410283Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T17:12:55.699436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 214748 ... 
], Cookie: 0 2025-04-17T17:15:16.955180Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331826306814582:2474]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:16.955206Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:16.955265Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:16.955333Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:16.955360Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:16.955393Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:16.955449Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331877846423705:2863], Partition 2, Sender [0:0:0], Recipient [6:7494331877846423792:2876], Cookie: 0 2025-04-17T17:15:16.955489Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331877846423792:2876]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:16.955504Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:16.955542Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:16.955577Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:16.955593Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:16.955612Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:16.955676Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331877846423707:2864], Partition 1, Sender [0:0:0], Recipient [6:7494331877846423802:2879], Cookie: 0 2025-04-17T17:15:16.955717Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331877846423802:2879]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:16.955731Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:16.955756Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:16.955786Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:16.955803Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:16.955822Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-17T17:15:16.988298Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 65538, Sender [0:0:0], Recipient [6:7494331822011847228:2469]: NActors::TEvents::TEvWakeup 2025-04-17T17:15:17.057731Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331822011847228:2469], Partition 0, Sender [0:0:0], Recipient [6:7494331826306814582:2474], Cookie: 0 2025-04-17T17:15:17.057795Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331826306814582:2474]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.057818Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.057854Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:17.057913Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:17.057933Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:17.057958Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:17.057997Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331877846423705:2863], Partition 2, Sender [0:0:0], Recipient [6:7494331877846423792:2876], Cookie: 0 2025-04-17T17:15:17.058029Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331877846423792:2876]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.058042Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.058057Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:17.058078Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:17.058087Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:17.058099Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:17.058127Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331877846423707:2864], Partition 1, Sender [0:0:0], Recipient [6:7494331877846423802:2879], Cookie: 0 2025-04-17T17:15:17.058146Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331877846423802:2879]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.058154Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.058167Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:17.058184Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:17.058194Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 
2025-04-17T17:15:17.058204Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:17.160399Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331822011847228:2469], Partition 0, Sender [0:0:0], Recipient [6:7494331826306814582:2474], Cookie: 0 2025-04-17T17:15:17.160497Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331826306814582:2474]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.160523Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.160571Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:17.160637Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:17.160663Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:17.160695Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:17.160755Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331877846423705:2863], Partition 2, Sender [0:0:0], Recipient [6:7494331877846423792:2876], Cookie: 0 2025-04-17T17:15:17.160797Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331877846423792:2876]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.160813Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.160853Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:17.160887Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:17.160904Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:17.160922Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:17.160962Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331877846423707:2864], Partition 1, Sender [0:0:0], Recipient [6:7494331877846423802:2879], Cookie: 0 2025-04-17T17:15:17.161001Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331877846423802:2879]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.161014Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.161040Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:17.161071Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:17.161089Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 
2025-04-17T17:15:17.161108Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:17.161466Z node 6 :PERSQUEUE TRACE: StateIdle event# 65538 (NActors::TEvents::TEvWakeup), Tablet [6:7494331822011847228:2469], Partition 0, Sender [0:0:0], Recipient [6:7494331826306814582:2474], Cookie: 0 2025-04-17T17:15:17.161511Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 65538, Sender [0:0:0], Recipient [6:7494331826306814582:2474]: NActors::TEvents::TEvWakeup 2025-04-17T17:15:17.161767Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 271188501, Sender [6:7494331826306814582:2474], Recipient [6:7494331822011847228:2469]: NKikimr::TEvPQ::TEvPartitionCounters 2025-04-17T17:15:17.161794Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionCounters 2025-04-17T17:15:17.161814Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2025-04-17T17:15:17.162046Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [6:7494331826306814582:2474], Recipient [6:7494331822011847228:2469]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-04-17T17:15:17.162066Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-04-17T17:15:17.162118Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [6:7494331826306814582:2474], Recipient [6:7494331822011847228:2469]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-04-17T17:15:17.162131Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_OffsetCommit [GOOD] Test command err: 2025-04-17T17:12:08.269846Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331078951407626:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:08.306705Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:12:12.563441Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00180c/r3tmp/tmpVc37UL/pdisk_1.dat 2025-04-17T17:12:14.116527Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331078951407626:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:14.118928Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:12:14.119213Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:17.320469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:17.320502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:19.825603Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:12:19.825998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:12:19.925051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25576, node 1 2025-04-17T17:12:25.592969Z INFO: TTestServer started on Port 17504 GrpcPort 25576 2025-04-17T17:12:28.459412Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6vg8/00180c/r3tmp/yandex9zYwbC.tmp 2025-04-17T17:12:28.459427Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6vg8/00180c/r3tmp/yandex9zYwbC.tmp 2025-04-17T17:12:28.459549Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6vg8/00180c/r3tmp/yandex9zYwbC.tmp 2025-04-17T17:12:28.459605Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:12:28.491521Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17504 PQClient connected to localhost:25576 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-04-17T17:12:36.730098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:12:36.730126Z node 1 :IMPORT WARN: Table profiles were not loaded Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:12:36.917555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:12:37.507581Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T17:12:37.543309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T17:12:47.822285Z node 1 :KQP_PROXY ERROR: TraceId: "01js2bt09920kdfg2nvhg7ya6a", Request deadline has expired for 3.363154s seconds 2025-04-17T17:12:47.865070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331246455133039:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:47.865167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:47.865522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331246455133073:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:47.869002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:12:47.888651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331246455133075:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:12:48.265957Z node 1 :TX_PROXY ERROR: Actor# [1:7494331246455133139:2506] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:12:48.411156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:12:48.530675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:12:48.548570Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494331250750100445:2364], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:12:48.550167Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDY0YzIyYjYtZTdkOTUyNTAtNmY4ZTUyNmItN2E4N2Q0ZjY=, ActorId: [1:7494331246455133033:2349], ActorState: ExecuteState, TraceId: 01js2bt8egcjzgt0rhfe4k1wf0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:12:48.557143Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:12:48.635795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7494331255045068050:2693] === CheckClustersList. Ok 2025-04-17T17:12:54.106654Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-17T17:12:54.128878Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-17T17:12:54.130085Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494331276519904756:2815], Recipient [1:7494331117606113711:2190]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:54.130124Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:12:54.130141Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:12:54.130179Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7494331276519904752:2812], Recipient [1:7494331117606113711:2190]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480} 2025-04-17T17:12:54.130192Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T17:12:54.251582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: 
"test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710672 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T17:12:54.252616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:1 ... PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:16.933128Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:17.028318Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331863484095539:2860], Partition 1, Sender [0:0:0], Recipient [6:7494331863484095618:2868], Cookie: 0 2025-04-17T17:15:17.028406Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331863484095618:2868]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.028436Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.028486Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:17.028571Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:17.028598Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:17.028631Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:17.028702Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331816239453754:2466], Partition 0, Sender [0:0:0], Recipient [6:7494331816239453817:2470], Cookie: 0 2025-04-17T17:15:17.028739Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331816239453817:2470]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.028753Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.028780Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:17.028812Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:17.028827Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:17.028844Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-17T17:15:17.028883Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331863484095538:2859], Partition 2, Sender [0:0:0], Recipient [6:7494331863484095616:2866], Cookie: 0 2025-04-17T17:15:17.028919Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331863484095616:2866]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.028931Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.028952Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:17.028976Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:17.028990Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:17.029005Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:17.034809Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331876368997892:2983], Partition 3, Sender [0:0:0], Recipient [6:7494331876368997983:2993], Cookie: 0 2025-04-17T17:15:17.034898Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331876368997983:2993]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.034926Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.034981Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:17.035059Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:17.035085Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:17.035116Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:17.040033Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331876368997891:2982], Partition 4, Sender [0:0:0], Recipient [6:7494331876368997980:2990], Cookie: 0 2025-04-17T17:15:17.040143Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331876368997980:2990]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.040171Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.040240Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:17.040339Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:17.040376Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:17.040408Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-17T17:15:17.124758Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331863484095539:2860], Partition 1, Sender [0:0:0], Recipient [6:7494331863484095618:2868], Cookie: 0 2025-04-17T17:15:17.124846Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331863484095618:2868]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.124880Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.124944Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:17.125039Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:17.125066Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:17.125110Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:17.132302Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331863484095538:2859], Partition 2, Sender [0:0:0], Recipient [6:7494331863484095616:2866], Cookie: 0 2025-04-17T17:15:17.132396Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331863484095616:2866]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.132427Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.132477Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:17.132562Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:17.132589Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:17.132621Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:17.132673Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331876368997892:2983], Partition 3, Sender [0:0:0], Recipient [6:7494331876368997983:2993], Cookie: 0 2025-04-17T17:15:17.132706Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331876368997983:2993]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.132720Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.132750Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:17.132783Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:17.132797Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:17.132813Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-17T17:15:17.132862Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331816239453754:2466], Partition 0, Sender [0:0:0], Recipient [6:7494331816239453817:2470], Cookie: 0 2025-04-17T17:15:17.132896Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331816239453817:2470]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.132906Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.132926Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:17.132953Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:17.132966Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:17.132983Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:17.146372Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331876368997891:2982], Partition 4, Sender [0:0:0], Recipient [6:7494331876368997980:2990], Cookie: 0 2025-04-17T17:15:17.146478Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331876368997980:2990]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.146512Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:17.146576Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:17.146667Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:17.146697Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:17.146733Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> SystemView::AuthPermissions_Access [GOOD] >> Initializer::Simple |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] >> YdbTableSplit::SplitByLoadWithUpdates >> YdbTableSplit::MergeByNoLoadAfterSplit >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeServerless [GOOD] Test command err: 2025-04-17T17:11:57.656986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:11:57.657147Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:11:57.657298Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0018ca/r3tmp/tmp2TuyQP/pdisk_1.dat 2025-04-17T17:11:58.295194Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12311, node 1 2025-04-17T17:12:00.566325Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:12:00.566380Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:12:00.566412Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:12:00.567120Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:12:00.581748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:12:00.746057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:12:00.746150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:12:00.773425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25740 2025-04-17T17:12:01.596138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:12:06.678076Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T17:12:06.867460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:12:06.871631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:12:06.976968Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:12:06.996711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:12:08.004924Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:12:08.005511Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:12:08.006028Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:12:08.006157Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:12:08.006227Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:12:08.007586Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:12:08.007660Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:12:08.007722Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:12:08.007776Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:12:08.449629Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:12:08.449720Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:12:08.479952Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:12:09.763297Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:10.061638Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T17:12:10.062316Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T17:12:10.202975Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T17:12:10.219996Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T17:12:10.230475Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T17:12:10.231201Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T17:12:10.232066Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T17:12:10.240494Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T17:12:10.241469Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T17:12:10.242197Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T17:12:10.247309Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T17:12:10.373223Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T17:12:10.374687Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1867:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T17:12:10.458819Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1880:2607] 2025-04-17T17:12:10.484701Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1903:2616] 2025-04-17T17:12:10.485448Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1903:2616], schemeshard id = 72075186224037897 2025-04-17T17:12:10.754891Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-17T17:12:10.989536Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T17:12:10.989849Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T17:12:10.990495Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-17T17:12:11.107034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T17:12:11.143702Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T17:12:11.144791Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T17:12:12.154586Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T17:12:13.677499Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T17:12:13.809244Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T17:12:15.242205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T17:12:19.365607Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:20.268205Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-17T17:12:20.268268Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-17T17:12:20.268351Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2588:2944], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-17T17:12:20.269059Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2589:2945] 2025-04-17T17:12:20.270270Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2589:2945], schemeshard id = 72075186224037899 2025-04-17T17:12:23.091863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2720:3244], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:23.092001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:23.135283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-17T17:12:25.247052Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2865:3079];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:12:25.252972Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2865:3079];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:12:25.255499Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2865:3079];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:12:25.255933Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2865:3079];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:12:25.258904Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2865:3079];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:12:25.259094Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2865:3079];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:12:25.259199Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2865:3079];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:12:25.259617Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2865:3079];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:12:25.260413Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2865:3079];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17 ... 86224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T17:15:15.343448Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T17:15:15.343474Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T17:15:15.343495Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T17:15:15.343517Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T17:15:15.343538Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T17:15:15.343559Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T17:15:15.343596Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T17:15:15.343618Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 
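
The TTxAnalyzeTableResponse and force-traversal records above are the aggregator side of an explicit statistics-collection request against the column table. A hedged client-side sketch — assuming the YQL ANALYZE statement is what drives this flow; the log itself never shows the client command:

    -- forces statistics collection for a column table
    ANALYZE `/Root/Database/Table1`;

The HTTP probe at the end of this test then reads the collected statistic back, which is where the final "Answer: '/Root/Database/Table1[Value]=4'" line below comes from.
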
2025-04-17T17:15:16.268986Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T17:15:16.269102Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-04-17T17:15:16.269143Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-17T17:15:16.269634Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T17:15:16.289480Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T17:15:16.289859Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T17:15:16.289924Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T17:15:16.290389Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T17:15:16.323323Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T17:15:16.323483Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-17T17:15:16.324349Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9849:7400], server id = [2:9854:7405], tablet id = 72075186224037905, status = OK 2025-04-17T17:15:16.324440Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9849:7400], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T17:15:16.325651Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9850:7401], server id = [2:9855:7406], tablet id = 72075186224037906, status = OK 2025-04-17T17:15:16.325720Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9850:7401], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T17:15:16.326000Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9851:7402], server id = [2:9856:7407], tablet id = 72075186224037907, status = OK 2025-04-17T17:15:16.326053Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9851:7402], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T17:15:16.327133Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9852:7403], server id = [2:9857:7408], tablet id = 72075186224037908, status = OK 2025-04-17T17:15:16.327191Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9852:7403], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T17:15:16.327569Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9853:7404], server id = [2:9859:7410], tablet id = 72075186224037909, status = OK 2025-04-17T17:15:16.327618Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9853:7404], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T17:15:16.328502Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-17T17:15:16.329221Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9849:7400], server id = [2:9854:7405], tablet id = 72075186224037905 2025-04-17T17:15:16.329255Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T17:15:16.329705Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-17T17:15:16.330427Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-17T17:15:16.330533Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = 
[2:9850:7401], server id = [2:9855:7406], tablet id = 72075186224037906 2025-04-17T17:15:16.330559Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T17:15:16.330853Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-17T17:15:16.331237Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037909 2025-04-17T17:15:16.331355Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9864:7415], server id = [2:9865:7416], tablet id = 72075186224037910, status = OK 2025-04-17T17:15:16.331427Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9864:7415], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T17:15:16.332232Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9851:7402], server id = [2:9856:7407], tablet id = 72075186224037907 2025-04-17T17:15:16.332262Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T17:15:16.332561Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9852:7403], server id = [2:9857:7408], tablet id = 72075186224037908 2025-04-17T17:15:16.332587Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T17:15:16.332987Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9866:7417], server id = [2:9868:7419], tablet id = 72075186224037911, status = OK 2025-04-17T17:15:16.333051Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9866:7417], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T17:15:16.333607Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9853:7404], server id = [2:9859:7410], tablet id = 72075186224037909 2025-04-17T17:15:16.333636Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T17:15:16.333811Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9867:7418], server id = [2:9870:7421], tablet id = 72075186224037912, status = OK 2025-04-17T17:15:16.333862Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9867:7418], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T17:15:16.334439Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9869:7420], server id = [2:9872:7423], tablet id = 72075186224037913, status = OK 2025-04-17T17:15:16.334490Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9869:7420], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T17:15:16.334621Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9871:7422], server id = [2:9874:7425], tablet id = 72075186224037914, status = OK 2025-04-17T17:15:16.334664Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9871:7422], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T17:15:16.335787Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037910 2025-04-17T17:15:16.335900Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2025-04-17T17:15:16.336342Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9864:7415], server id = [2:9865:7416], tablet id = 72075186224037910 2025-04-17T17:15:16.336372Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T17:15:16.336833Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9866:7417], server id = [2:9868:7419], tablet id = 72075186224037911 2025-04-17T17:15:16.336861Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 
2025-04-17T17:15:16.337124Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2025-04-17T17:15:16.337344Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037913 2025-04-17T17:15:16.337522Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037914 2025-04-17T17:15:16.337563Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T17:15:16.337712Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T17:15:16.337865Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T17:15:16.338079Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-17T17:15:16.340378Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9867:7418], server id = [2:9870:7421], tablet id = 72075186224037912 2025-04-17T17:15:16.340411Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T17:15:16.340880Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9869:7420], server id = [2:9872:7423], tablet id = 72075186224037913 2025-04-17T17:15:16.340910Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T17:15:16.341083Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T17:15:16.341670Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9871:7422], server id = [2:9874:7425], tablet id = 72075186224037914 2025-04-17T17:15:16.341698Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T17:15:16.363987Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTk4ZmY2NWYtMmFhYzQyZmItNTI5YmQ1ZjctYzk5OWZh, TxId: 2025-04-17T17:15:16.364050Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTk4ZmY2NWYtMmFhYzQyZmItNTI5YmQ1ZjctYzk5OWZh, TxId: 2025-04-17T17:15:16.364661Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T17:15:16.379875Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-17T17:15:16.379934Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
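
The RunDataQuery text above is the exact parameterized UPSERT that TQueryBase uses to persist per-column statistics. A read-back sketch against the same `.metadata/_statistics` layout shown in the log (parameter values are placeholders; the table lives under the shared database root, here /Root/Shared):

    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    SELECT stat_type, column_tag, data
    FROM `.metadata/_statistics`
    WHERE owner_id = $owner_id
      AND local_path_id = $local_path_id;

owner_id/local_path_id correspond to the path-id pairs that appear throughout the traversal records, e.g. [OwnerId: 72075186224037899, LocalPathId: 2].
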
Send TEvAnalyzeResponse, OperationId=DXm X S1, ActorId=[1:4593:3494] 2025-04-17T17:15:16.380920Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:9901:5728]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T17:15:16.381108Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T17:15:16.381188Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T17:15:16.381370Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T17:15:16.381420Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-17T17:15:16.381473Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-04-17T17:15:16.396267Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table1[Value]=4' >> BasicStatistics::TwoTables [GOOD] >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] Test command err: 2025-04-17T17:15:13.987980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:15:13.988475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:15:13.988527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/00225c/r3tmp/tmpxWPwxV/pdisk_1.dat 2025-04-17T17:15:14.377290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:15:14.416826Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:15:14.460958Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:15:14.461831Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:15:14.461997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:15:14.462102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:15:14.473624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:15:14.557986Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T17:15:14.558051Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T17:15:14.558199Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T17:15:14.673235Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T17:15:14.673332Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:15:14.673854Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:15:14.673959Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:15:14.674257Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:15:14.674425Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:15:14.674504Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T17:15:14.676145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:15:14.676549Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T17:15:14.677151Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T17:15:14.677211Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T17:15:14.706377Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:15:14.707142Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:15:14.707487Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:15:14.707702Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:15:14.745275Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:15:14.746028Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:15:14.746135Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:15:14.747763Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:15:14.747849Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:15:14.747900Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:15:14.749835Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:15:14.749980Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:15:14.750070Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:15:14.760992Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:15:14.818569Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:15:14.818784Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:15:14.818916Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:15:14.818952Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:15:14.818986Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:15:14.819022Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:15:14.819270Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
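For reference, the ESchemeOpCreateTable request proposed above (Name: "table-1", two Uint32 columns, key column "key", UniformPartitionsCount: 1) corresponds roughly to the following YQL DDL. This is a hedged reconstruction from the logged proto, not a statement taken from the test itself:

    CREATE TABLE `/Root/table-1` (
        key Uint32,
        value Uint32,
        PRIMARY KEY (key)
    ) WITH (
        UNIFORM_PARTITIONS = 1  -- mirrors UniformPartitionsCount: 1 in the logged request
    );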
2025-04-17T17:15:14.819318Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:14.819643Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:15:14.819754Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:15:14.819875Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:15:14.819918Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:15:14.819952Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:15:14.820008Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:15:14.820063Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:15:14.820094Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:15:14.820183Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:15:14.820313Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:15:14.820351Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:15:14.820395Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:15:14.820775Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:15:14.820826Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:15:14.820936Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:15:14.821121Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:15:14.821180Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:15:14.821290Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:15:14.821340Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:15:14.821384Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:15:14.821414Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T17:15:14.821457Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:15:14.821737Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:15:14.821778Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:15:14.821811Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:15:14.821854Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-17T17:15:14.821906Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:15:14.821936Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:15:14.821967Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:15:14.821996Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:15:14.822025Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:15:14.823460Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:15:14.823509Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:15:14.835032Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 281474976715659] at 72075186224037888 is Executed 2025-04-17T17:15:20.534855Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit MakeScanSnapshot 2025-04-17T17:15:20.534890Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit WaitForStreamClearance 2025-04-17T17:15:20.534912Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2025-04-17T17:15:20.534967Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:745:2626] for [0:281474976715659] at 72075186224037888 2025-04-17T17:15:20.534997Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2025-04-17T17:15:20.535053Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:15:20.535201Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287427, Sender [2:666:2570], Recipient [2:745:2626]: NKikimrTx.TEvStreamClearanceRequest TxId: 281474976715659 ShardId: 72075186224037888 KeyRange { From: "\001\000\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } 2025-04-17T17:15:20.535240Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Received TEvStreamClearanceRequest from ShardId# 72075186224037888 2025-04-17T17:15:20.535293Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Sending TEvStreamClearanceResponse to [2:666:2570] ShardId# 72075186224037888 2025-04-17T17:15:20.535475Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:745:2626], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715659 2025-04-17T17:15:20.535514Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-04-17T17:15:20.535655Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:745:2626], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715659 Cleared: true 2025-04-17T17:15:20.535689Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-17T17:15:20.535797Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient 
[2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:20.535825Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:20.535876Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:15:20.535910Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:15:20.535953Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-04-17T17:15:20.535993Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2025-04-17T17:15:20.536046Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715659] at 72075186224037888 2025-04-17T17:15:20.536091Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-04-17T17:15:20.536426Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit WaitForStreamClearance 2025-04-17T17:15:20.536463Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit ReadTableScan 2025-04-17T17:15:20.536495Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2025-04-17T17:15:20.536750Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2025-04-17T17:15:20.536776Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:15:20.536813Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:15:20.536842Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:15:20.536868Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:15:20.536918Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:15:20.537501Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:779:2647], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-17T17:15:20.537542Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-17T17:15:20.537638Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:779:2647], Recipient [2:745:2626]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715659 ShardId: 72075186224037888 2025-04-17T17:15:20.537671Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-04-17T17:15:20.537983Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:744:2626], Recipient [2:745:2626]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715658 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-17T17:15:20.538045Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-17T17:15:20.538102Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Reserving quota 1 messages for ShardId# 72075186224037888 2025-04-17T17:15:20.538180Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 
72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-04-17T17:15:20.538290Z node 2 :TX_DATASHARD ERROR: Got scan fatal error: Invalid DyNumber binary representation 2025-04-17T17:15:20.538347Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-04-17T17:15:20.538531Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T17:15:20.538568Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715659, at: 72075186224037888 2025-04-17T17:15:20.538688Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:779:2647], Recipient [2:745:2626]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715659 ShardId: 72075186224037888 2025-04-17T17:15:20.538741Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2025-04-17T17:15:20.538786Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Released quota 1 reserved messages from ShardId# 72075186224037888 2025-04-17T17:15:20.538924Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:20.538974Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:20.539043Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:15:20.539085Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:15:20.539132Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for ReadTableScan 2025-04-17T17:15:20.539172Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2025-04-17T17:15:20.539218Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715659] at 72075186224037888 error: Invalid DyNumber binary representation, IsFatalError: 1 2025-04-17T17:15:20.539278Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-04-17T17:15:20.539319Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit ReadTableScan 2025-04-17T17:15:20.539360Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:15:20.539393Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit FinishPropose 2025-04-17T17:15:20.539444Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is DelayComplete 2025-04-17T17:15:20.539484Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:15:20.539534Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:15:20.539565Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:15:20.539607Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-04-17T17:15:20.539630Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit CompletedOperations 
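The fatal scan error above is the expected outcome of the CorruptedDyNumber test: the read-table scan hits a DyNumber cell whose stored binary representation is invalid and aborts, which surfaces below as an EXEC_ERROR propose result with a PROGRAM_ERROR issue. For contrast, a well-formed DyNumber value is normally produced in YQL with an explicit cast; a minimal sketch (the table and column names here are hypothetical, not from the test):

    UPSERT INTO `/Root/SomeTable` (Key, Value)
    VALUES (1u, CAST("18.75" AS DyNumber));  -- CAST from String yields a valid DyNumber encoding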
2025-04-17T17:15:20.539670Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715659] at 72075186224037888 has finished 2025-04-17T17:15:20.539706Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:15:20.539761Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:15:20.539810Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:15:20.539847Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:15:20.539910Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:15:20.539947Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715659] at 72075186224037888 on unit FinishPropose 2025-04-17T17:15:20.539995Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715659 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: EXEC_ERROR 2025-04-17T17:15:20.540053Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715659 at tablet 72075186224037888 status: EXEC_ERROR errors: PROGRAM_ERROR (Invalid DyNumber binary representation) | 2025-04-17T17:15:20.540153Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:15:20.540450Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:666:2570], Recipient [2:745:2626]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037888 Status: EXEC_ERROR Error { Kind: PROGRAM_ERROR Reason: "Invalid DyNumber binary representation" } TxId: 281474976715659 Step: 0 OrderId: 281474976715659 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 482 } } 2025-04-17T17:15:20.540489Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Received TEvProposeTransactionResult Status# EXEC_ERROR ShardId# 72075186224037888 2025-04-17T17:15:20.540551Z node 2 :TX_PROXY ERROR: [ReadTable [2:745:2626] TxId# 281474976715658] RESPONSE Status# ExecError shard: 72075186224037888 table: /Root/Table 2025-04-17T17:15:20.540868Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:745:2626], Recipient [2:666:2570]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1500 TxId: 281474976715658
>> YdbTableSplit::RenameTablesAndSplit
>> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD]
>> YdbTableSplit::SplitByLoadWithDeletes
>> DataShardReadTableSnapshots::ReadTableUUID [GOOD]
>> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthPermissions_Access [GOOD]
Test command err:
2025-04-17T17:11:24.478880Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494330886860739029:2088];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:11:24.488574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/000d48/r3tmp/tmpBisQvJ/pdisk_1.dat 2025-04-17T17:11:25.192824Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2025-04-17T17:11:25.238433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:11:25.238521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:11:25.252618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15252, node 1 2025-04-17T17:11:26.619892Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:11:26.619909Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:11:26.619915Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:11:26.620000Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22896 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:11:29.154804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:11:29.332718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "Tenant1" } } TxId: 281474976710658 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T17:11:29.332960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateExtSubDomain Propose, path/Root/Tenant1, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:11:29.333044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Tenant1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-17T17:11:29.333171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-17T17:11:29.333312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T17:11:29.333416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:11:29.333433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:11:29.333500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T17:11:29.333528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-17T17:11:29.337061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-17T17:11:29.337212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: root@builtin, status: StatusAccepted, operation: CREATE DATABASE, path: /Root/Tenant1 2025-04-17T17:11:29.337398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T17:11:29.337412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T17:11:29.337514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T17:11:29.337580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T17:11:29.337613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494330891155706907:2391], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-04-17T17:11:29.337641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494330891155706907:2391], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-04-17T17:11:29.337698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:11:29.337715Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:11:29.337745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710658:0, at tablet# 72057594046644480 2025-04-17T17:11:29.337770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710658 ready parts: 1/1 waiting... 2025-04-17T17:11:29.354496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:11:29.356488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-17T17:11:29.356607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-17T17:11:29.356618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-17T17:11:29.356634Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-17T17:11:29.356659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-17T17:11:29.357713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-17T17:11:29.357832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-17T17:11:29.357841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-17T17:11:29.357855Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-17T17:11:29.357868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-17T17:11:29.357945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-04-17T17:11:29.358094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T17:11:29.358106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-04-17T17:11:29.358120Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at 
schemeshard: 72057594046644480 2025-04-17T17:11:29.358191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710658 msg type: 269090816 2025-04-17T17:11:29.358269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710658, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:11:29.365302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-04-17T17:11:29.365385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-04-17T17:11:29.366020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744909889408, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:11:29.366121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744909889408 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T17:11:29.366157Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-04-17T17:11:29.366339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-04-17T17:11:29.366373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-04-17T17:11:29.366489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T17:11:29.366524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-0 ... 
DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user6tenant1admin },{ Sid: user2 },{ Sid: user1rootadmin }] Groups: [] } Children [.metadata,Dir1,Dir2,Table0,Tenant1,Tenant2] }] } 2025-04-17T17:15:16.061821Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494331886006076468:2454], row count: 4, finished: 0 2025-04-17T17:15:16.062128Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:15:16.062814Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata TableId: [72057594046644480:5:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [workload_manager] }] } 2025-04-17T17:15:16.062863Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494331886006076468:2454], row count: 0, finished: 0 2025-04-17T17:15:16.063465Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:15:16.069328Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager TableId: [72057594046644480:6:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [pools] }] } 2025-04-17T17:15:16.069410Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494331886006076468:2454], row count: 0, finished: 0 2025-04-17T17:15:16.069916Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:15:16.070263Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet 
[{ Path: Root/.metadata/workload_manager/pools TableId: [72057594046644480:7:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [default] }] } 2025-04-17T17:15:16.070308Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494331886006076468:2454], row count: 0, finished: 0 2025-04-17T17:15:16.070377Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools/default TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:15:16.070693Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools/default TableId: [72057594046644480:8:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindResourcePool DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:15:16.070795Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494331886006076468:2454], row count: 6, finished: 0 2025-04-17T17:15:16.070944Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:15:16.071452Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-17T17:15:16.071503Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494331886006076468:2454], row count: 1, finished: 0 2025-04-17T17:15:16.071859Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir2 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:15:16.073793Z node 41 :SYSTEM_VIEWS TRACE: Got 
navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir2 TableId: [72057594046644480:10:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-17T17:15:16.073846Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494331886006076468:2454], row count: 0, finished: 0 2025-04-17T17:15:16.075731Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:15:16.077512Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: [72057594046644480:4:1] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:15:16.077561Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494331886006076468:2454], row count: 0, finished: 0 2025-04-17T17:15:16.077614Z node 41 :SYSTEM_VIEWS INFO: Scan finished, actor: [41:7494331886006076468:2454], owner: [41:7494331886006076464:2452], scan id: 0, table id: [72057594046644480:1:0:auth_permissions] 2025-04-17T17:15:16.079200Z node 41 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [41:7494331804401695532:2209], database# , query hash# 12107705915200741666, cpu time# 198383 2025-04-17T17:15:16.079683Z node 41 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910116052, txId: 281474976710692] shutting down 2025-04-17T17:15:16.122663Z node 43 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:15:16.123582Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 43 2025-04-17T17:15:16.124530Z node 41 :HIVE WARN: HIVE#72057594037968897 Node(43, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:15:16.124689Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 44 2025-04-17T17:15:16.125017Z node 41 :HIVE WARN: HIVE#72057594037968897 Node(44, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:15:16.127940Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 45 2025-04-17T17:15:16.128913Z node 41 :HIVE WARN: HIVE#72057594037968897 Node(45, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:15:16.129071Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 42 2025-04-17T17:15:16.130092Z node 41 
:HIVE WARN: HIVE#72057594037968897 Node(42, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:15:16.128365Z node 45 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:15:16.136207Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494331813532772321:2105], Type=268959746 2025-04-17T17:15:16.136253Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494331813532772321:2105], Type=268959746 2025-04-17T17:15:16.136277Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494331813532772321:2105], Type=268959746 2025-04-17T17:15:16.136300Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494331813532772321:2105], Type=268959746 2025-04-17T17:15:16.136323Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494331813532772321:2105], Type=268959746 2025-04-17T17:15:16.136347Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494331813532772321:2105], Type=268959746 2025-04-17T17:15:16.147032Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[45:7494331808345790474:2102], Type=268959746
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoTables [GOOD]
Test command err:
2025-04-17T17:12:08.981071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:12:08.982535Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:08.983577Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0018c0/r3tmp/tmpZqv7xs/pdisk_1.dat 2025-04-17T17:12:11.639070Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9225, node 1 2025-04-17T17:12:14.078883Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:12:14.079181Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:12:14.079448Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:12:14.104956Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:12:14.160094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:12:14.430644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:12:14.430744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:12:14.457781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7932 2025-04-17T17:12:17.210363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:12:32.922731Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T17:12:33.404067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:12:33.408262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:12:33.466296Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:12:33.494343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:12:34.376667Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:12:34.379132Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:12:34.384578Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:12:34.384722Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:12:34.384800Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:12:34.384965Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:12:34.385028Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:12:34.385080Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:12:34.385142Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:12:34.946368Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:12:34.947490Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:12:35.019786Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:12:35.914865Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:36.102617Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T17:12:36.103590Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T17:12:36.301537Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T17:12:36.316774Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T17:12:36.316938Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T17:12:36.316979Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T17:12:36.317022Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T17:12:36.317062Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T17:12:36.317104Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T17:12:36.317149Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T17:12:36.317629Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T17:12:36.462035Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T17:12:36.463034Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1870:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T17:12:36.561313Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1880:2605] 2025-04-17T17:12:36.613551Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1906:2620] 2025-04-17T17:12:36.614965Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1906:2620], schemeshard id = 72075186224037897 2025-04-17T17:12:36.714529Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T17:12:37.067295Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T17:12:37.067349Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T17:12:37.067410Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T17:12:37.103948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T17:12:37.117201Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T17:12:37.117283Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T17:12:37.591015Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T17:12:38.754269Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T17:12:38.846657Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T17:12:41.905569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2228:3067], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:41.905709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:42.008030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T17:12:42.966503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2367:3103], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:43.915537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:43.926482Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2372:3107]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T17:12:43.927547Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T17:12:43.936303Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2374:3109] 2025-04-17T17:12:43.936663Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2374:3109] 2025-04-17T17:12:43.956273Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2375:2870] 2025-04-17T17:12:43.957928Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2374:3109], server id = [2:2375:2870], tablet id = 72075186224037894, status = OK 2025-04-17T17:12:43.959569Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2375:2870], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T17:12:43.959942Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-17T17:12:43.973277Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T17:12:43.973990Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2372:3107], StatRequests.size() = 1 2025-04-17T17:12:44.340057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2379:3113], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:44.351062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:44.353029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2384:3118], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:44.466942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-17T17:12:45.264399Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T17:12:45.264694Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T17:12:45.408375Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2374:3109], schemeshard count = 1 2025-04-17T17:12:45.914350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorAc ... rId = [2:6683:4767], StatRequests.size() = 1 2025-04-17T17:15:11.988846Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T17:15:11.989036Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T17:15:11.989431Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T17:15:12.052942Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 3, at schemeshard: 72075186224037897 2025-04-17T17:15:12.053031Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 229.000000s, at schemeshard: 72075186224037897 2025-04-17T17:15:12.053382Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 73 2025-04-17T17:15:12.076022Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T17:15:12.757030Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6716:4783]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T17:15:12.757262Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-17T17:15:12.757296Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6716:4783], StatRequests.size() = 1 2025-04-17T17:15:13.552999Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T17:15:13.553106Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T17:15:13.553174Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is data table. 2025-04-17T17:15:13.553238Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-17T17:15:13.553656Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T17:15:13.584730Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T17:15:13.596414Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6741:4804], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:15:13.596578Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6751:4809], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:15:13.596730Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:15:13.628490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-17T17:15:13.744977Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6755:4812], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-17T17:15:13.954065Z node 2 :TX_PROXY ERROR: Actor# [2:6853:4859] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:15:14.059823Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6882:4874]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T17:15:14.060161Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-17T17:15:14.060221Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6882:4874], StatRequests.size() = 1 2025-04-17T17:15:14.275310Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDBkNTIxNjktNzYyYWNlYmMtYzhiY2NlYWEtMmYzNTBlZjM=, TxId: 2025-04-17T17:15:14.275388Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDBkNTIxNjktNzYyYWNlYmMtYzhiY2NlYWEtMmYzNTBlZjM=, TxId: 2025-04-17T17:15:14.275974Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T17:15:14.297493Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-17T17:15:14.297579Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T17:15:14.883902Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:6917:4893]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T17:15:14.884231Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-17T17:15:14.884280Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:6917:4893], StatRequests.size() = 1 2025-04-17T17:15:16.390136Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:6958:4915]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T17:15:16.390462Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-17T17:15:16.390506Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:6958:4915], StatRequests.size() = 1 2025-04-17T17:15:17.148865Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T17:15:17.165132Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T17:15:17.165215Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T17:15:17.165281Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-17T17:15:17.165319Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T17:15:17.165647Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-17T17:15:17.168725Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T17:15:17.190137Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDk1MjU5OWItYjY4YjIyOWItZWZjZjY4YjUtZDBmMjdlMjg=, TxId: 2025-04-17T17:15:17.190205Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDk1MjU5OWItYjY4YjIyOWItZWZjZjY4YjUtZDBmMjdlMjg=, TxId: 2025-04-17T17:15:17.190982Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T17:15:17.225734Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T17:15:17.225794Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T17:15:17.901794Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7024:4954]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T17:15:17.902095Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-17T17:15:17.902137Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7024:4954], StatRequests.size() = 1 2025-04-17T17:15:19.387382Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:7067:4978]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T17:15:19.387654Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-04-17T17:15:19.387700Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:7067:4978], StatRequests.size() = 1 2025-04-17T17:15:20.096864Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T17:15:20.097245Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T17:15:20.097582Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T17:15:20.109318Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T17:15:20.109371Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T17:15:20.109406Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-17T17:15:20.109432Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T17:15:20.109665Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-17T17:15:20.112007Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T17:15:20.129974Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWYwMTAxNzEtMzgzMGE0ZTAtOGM2ZWZmMjYtZmRiMWJiNTc=, TxId: 2025-04-17T17:15:20.130024Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWYwMTAxNzEtMzgzMGE0ZTAtOGM2ZWZmMjYtZmRiMWJiNTc=, TxId: 2025-04-17T17:15:20.130376Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismatch: node id# 2, interval end# 1970-01-01T00:02:06.000000Z, event interval end# 2025-04-17T17:15:18.000000Z 2025-04-17T17:15:20.130576Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T17:15:20.146937Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T17:15:20.146996Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T17:15:20.801191Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:7128:5012]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T17:15:20.801517Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-04-17T17:15:20.801573Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:7128:5012], StatRequests.size() = 1 2025-04-17T17:15:20.802268Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 127 ], ReplyToActorId[ [2:7130:5014]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T17:15:20.804983Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 127 ] 2025-04-17T17:15:20.805034Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 127, ReplyToActorId = [2:7130:5014], StatRequests.size() = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] Test command err: 2025-04-17T17:15:14.951264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:15:14.951779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:15:14.951836Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002214/r3tmp/tmpvgoBgd/pdisk_1.dat 2025-04-17T17:15:15.305785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:15:15.347643Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:15:15.396921Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:15:15.397735Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:15:15.397900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:15:15.398004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:15:15.418317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:15:15.507538Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T17:15:15.507610Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T17:15:15.507770Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T17:15:15.618254Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T17:15:15.618348Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:15:15.618860Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:15:15.618962Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:15:15.619206Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:15:15.619358Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:15:15.619418Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T17:15:15.620680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:15:15.620982Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T17:15:15.621437Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T17:15:15.621489Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T17:15:15.656462Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:15:15.657449Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:15:15.657916Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:15:15.658243Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:15:15.711154Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:15:15.712017Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:15:15.712144Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:15:15.713734Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:15:15.713802Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:15:15.713863Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:15:15.714197Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:15:15.714318Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:15:15.714407Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:15:15.728660Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:15:15.751887Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:15:15.752069Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:15:15.752218Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:15:15.752258Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:15:15.752321Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:15:15.752360Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:15:15.752581Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
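For readability, the CreateTable scheme request traced above (Name: "table-1", columns key/value of type Uint32, key column "key", UniformPartitionsCount: 1) corresponds roughly to the following YQL DDL. This is a sketch reconstructed from the logged protobuf, not output of the test; in particular, the UNIFORM_PARTITIONS setting is assumed to map from UniformPartitionsCount.

    CREATE TABLE `table-1` (
        key Uint32,            -- key column from the logged request
        value Uint32,          -- value column from the logged request
        PRIMARY KEY (key)      -- KeyColumnNames: "key"
    )
    WITH (UNIFORM_PARTITIONS = 1);  -- assumed mapping of UniformPartitionsCount: 1
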
2025-04-17T17:15:15.752629Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:15.752967Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:15:15.753063Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:15:15.753185Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:15:15.753229Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:15:15.753264Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:15:15.753330Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:15:15.753368Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:15:15.753398Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:15:15.753471Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:15:15.753610Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:15:15.753641Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:15:15.753680Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:15:15.754108Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:15:15.754162Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:15:15.754270Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:15:15.754495Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:15:15.754558Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:15:15.754650Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:15:15.754694Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:15:15.754736Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:15:15.754774Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T17:15:15.754807Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:15:15.755115Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:15:15.755176Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:15:15.755211Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:15:15.755260Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-17T17:15:15.755316Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:15:15.755348Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:15:15.755379Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:15:15.755409Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:15:15.755435Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:15:15.756974Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:15:15.757024Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:15:15.768671Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 186224037896 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:15:22.159484Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037896 2025-04-17T17:15:22.159522Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037896 has no attached operations 2025-04-17T17:15:22.159568Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037896 2025-04-17T17:15:22.159619Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-04-17T17:15:22.160063Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1364:3077], Recipient [2:1084:2857]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715664 ShardId: 72075186224037896 2025-04-17T17:15:22.160097Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Received TEvStreamQuotaRequest from ShardId# 72075186224037896 2025-04-17T17:15:22.160145Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Reserving quota 1 messages for ShardId# 72075186224037896 2025-04-17T17:15:22.160236Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1364:3077], Recipient [2:1262:2998]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-17T17:15:22.160269Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-17T17:15:22.160349Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-04-17T17:15:22.160623Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037896, TxId: 281474976715664, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T17:15:22.160739Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1364:3077], Recipient [2:1084:2857]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: RESPONSE_DATA TxId: 281474976715664 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\006\000\000\000b\005\035B\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\006\000\000\000" 2025-04-17T17:15:22.160770Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] 
TxId# 281474976715663] Received stream data from ShardId# 72075186224037896 2025-04-17T17:15:22.160800Z node 2 :TX_PROXY TRACE: [ReadTable [2:1084:2857] TxId# 281474976715663] Sending TEvStreamDataAck to [2:1364:3077] ShardId# 72075186224037896 2025-04-17T17:15:22.160862Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037896, TxId: 281474976715664, PendingAcks: 0 2025-04-17T17:15:22.160927Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1364:3077], Recipient [2:1084:2857]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715664 ShardId: 72075186224037896 2025-04-17T17:15:22.160955Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Received TEvStreamQuotaRequest from ShardId# 72075186224037896 2025-04-17T17:15:22.161273Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:1083:2857], Recipient [2:1084:2857]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715663 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-17T17:15:22.161305Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-17T17:15:22.161332Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Reserving quota 1 messages for ShardId# 72075186224037896 2025-04-17T17:15:22.161362Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-04-17T17:15:22.161406Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-04-17T17:15:22.161511Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:1364:3077], Recipient [2:1084:2857]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715664 ShardId: 72075186224037896 2025-04-17T17:15:22.161529Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Received TEvStreamQuotaRelease from ShardId# 72075186224037896 2025-04-17T17:15:22.161547Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Released quota 1 reserved messages from ShardId# 72075186224037896 2025-04-17T17:15:22.161583Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037896 2025-04-17T17:15:22.161603Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715664, at: 72075186224037896 2025-04-17T17:15:22.161695Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1262:2998], Recipient [2:1262:2998]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:22.161716Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:22.161751Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-04-17T17:15:22.161772Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:15:22.161794Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715664] at 72075186224037896 for ReadTableScan 2025-04-17T17:15:22.161814Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit ReadTableScan 2025-04-17T17:15:22.161833Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715664] at 72075186224037896 error: , IsFatalError: 0 2025-04-17T17:15:22.161862Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is Executed 
2025-04-17T17:15:22.161911Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit ReadTableScan 2025-04-17T17:15:22.161942Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037896 to execution unit FinishPropose 2025-04-17T17:15:22.161962Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit FinishPropose 2025-04-17T17:15:22.161986Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is DelayComplete 2025-04-17T17:15:22.162004Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit FinishPropose 2025-04-17T17:15:22.162025Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037896 to execution unit CompletedOperations 2025-04-17T17:15:22.162046Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit CompletedOperations 2025-04-17T17:15:22.162073Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is Executed 2025-04-17T17:15:22.162097Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit CompletedOperations 2025-04-17T17:15:22.162115Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715664] at 72075186224037896 has finished 2025-04-17T17:15:22.162133Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:15:22.162155Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037896 2025-04-17T17:15:22.162178Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037896 has no attached operations 2025-04-17T17:15:22.162199Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037896 2025-04-17T17:15:22.162235Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-04-17T17:15:22.162258Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715664] at 72075186224037896 on unit FinishPropose 2025-04-17T17:15:22.162287Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715664 at tablet 72075186224037896 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-17T17:15:22.162332Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-04-17T17:15:22.162494Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1262:2998], Recipient [2:1084:2857]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: COMPLETE TxId: 281474976715664 Step: 0 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037896 CpuTimeUsec: 238 } } 2025-04-17T17:15:22.162515Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Received stream complete from ShardId# 72075186224037896 2025-04-17T17:15:22.162561Z node 2 :TX_PROXY INFO: [ReadTable [2:1084:2857] TxId# 281474976715663] RESPONSE Status# ExecComplete prepare time: 0.016130s execute time: 0.659579s total time: 0.675709s 2025-04-17T17:15:22.162869Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:886:2709]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 
281474976715663 2025-04-17T17:15:22.163016Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:994:2791]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-17T17:15:22.163263Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:999:2793]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-17T17:15:22.163508Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:1260:2996]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-17T17:15:22.163857Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1367:3080], Recipient [2:1149:2914]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:15:22.163891Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:15:22.163940Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1365:3078], serverId# [2:1367:3080], sessionId# [0:0:0] 2025-04-17T17:15:22.164016Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:1262:2998]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-17T17:15:22.164265Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:1149:2914]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-17T17:15:22.164406Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1368:3081], Recipient [2:1153:2916]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:15:22.164439Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:15:22.164469Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1366:3079], serverId# [2:1368:3081], sessionId# [0:0:0] 2025-04-17T17:15:22.164570Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:1153:2916]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] >> Cdc::RacyRebootAndSplitWithTxInflight [GOOD] >> Cdc::RacyActivateAndEnqueue >> YdbTableSplit::SplitByLoadWithReads |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableUUID [GOOD] Test command err: 2025-04-17T17:15:15.080169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:15:15.080670Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:15:15.080724Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002267/r3tmp/tmpAwRbJ5/pdisk_1.dat 2025-04-17T17:15:15.539199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:15:15.584980Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:15:15.624044Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:15:15.624916Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:15:15.625131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:15:15.625236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:15:15.636633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:15:15.723621Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T17:15:15.723711Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T17:15:15.723870Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T17:15:15.844406Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T17:15:15.844482Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:15:15.844860Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:15:15.844944Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:15:15.845146Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:15:15.845272Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:15:15.845341Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T17:15:15.846576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:15:15.846900Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T17:15:15.847347Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T17:15:15.847398Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T17:15:15.875980Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:15:15.876998Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:15:15.877447Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:15:15.877719Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:15:15.922130Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:15:15.922908Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:15:15.923032Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:15:15.924826Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:15:15.924903Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:15:15.924994Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:15:15.925357Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:15:15.925497Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:15:15.925570Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:15:15.936361Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:15:15.966290Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:15:15.966468Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:15:15.966575Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:15:15.966609Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:15:15.966640Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:15:15.966673Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:15:15.966873Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-17T17:15:15.966917Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:15.967199Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:15:15.967294Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:15:15.967401Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:15:15.967436Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:15:15.967492Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:15:15.967565Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:15:15.967607Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:15:15.967646Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:15:15.967706Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:15:15.967831Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:15:15.967861Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:15:15.967899Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:15:15.968244Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:15:15.968295Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:15:15.968376Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:15:15.968545Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:15:15.968600Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:15:15.968688Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:15:15.968730Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:15:15.968763Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:15:15.968806Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T17:15:15.968840Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:15:15.969091Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:15:15.969127Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:15:15.969155Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:15:15.969195Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-17T17:15:15.969243Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:15:15.969268Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:15:15.969298Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:15:15.969333Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:15:15.969356Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:15:15.970588Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:15:15.970634Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:15:15.981414Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... _DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-04-17T17:15:22.416676Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:864:2693], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715662 Cleared: true 2025-04-17T17:15:22.416713Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-17T17:15:22.416813Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:22.416847Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:22.416912Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:15:22.416943Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:15:22.416985Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for WaitForStreamClearance 2025-04-17T17:15:22.417017Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit WaitForStreamClearance 2025-04-17T17:15:22.417054Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715662] at 72075186224037888 2025-04-17T17:15:22.417083Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-04-17T17:15:22.417106Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit WaitForStreamClearance 2025-04-17T17:15:22.417132Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit ReadTableScan 2025-04-17T17:15:22.417153Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2025-04-17T17:15:22.417306Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Continue 2025-04-17T17:15:22.417325Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:15:22.417346Z node 2 :TX_DATASHARD TRACE: Check 
candidate unit PlanQueue at 72075186224037888 2025-04-17T17:15:22.417379Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:15:22.417405Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:15:22.417455Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:15:22.417853Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:876:2704], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-17T17:15:22.417879Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-17T17:15:22.417927Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-04-17T17:15:22.418165Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-17T17:15:22.418201Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-04-17T17:15:22.418247Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-17T17:15:22.418376Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T17:15:22.418451Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2025-04-17T17:15:22.418508Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-04-17T17:15:22.418543Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-04-17T17:15:22.418812Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-17T17:15:22.418833Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-04-17T17:15:22.418855Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-17T17:15:22.418891Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T17:15:22.418925Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2025-04-17T17:15:22.418961Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-04-17T17:15:22.418990Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-04-17T17:15:22.419164Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-17T17:15:22.419191Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-04-17T17:15:22.419223Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 
72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-17T17:15:22.419259Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T17:15:22.419295Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2025-04-17T17:15:22.419328Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-04-17T17:15:22.419355Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-04-17T17:15:22.419526Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-17T17:15:22.419548Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-04-17T17:15:22.419593Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-17T17:15:22.419661Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-17T17:15:22.419796Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T17:15:22.419829Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715662, at: 72075186224037888 2025-04-17T17:15:22.419885Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2025-04-17T17:15:22.419914Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037888 2025-04-17T17:15:22.420031Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:22.420064Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:22.421448Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:15:22.421504Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:15:22.421556Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for ReadTableScan 2025-04-17T17:15:22.421589Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2025-04-17T17:15:22.421626Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715662] at 72075186224037888 error: , IsFatalError: 0 2025-04-17T17:15:22.421664Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-04-17T17:15:22.421693Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit ReadTableScan 2025-04-17T17:15:22.421723Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:15:22.421757Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-04-17T17:15:22.421785Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is DelayComplete 
2025-04-17T17:15:22.421806Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:15:22.421839Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:15:22.421864Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:15:22.421940Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-04-17T17:15:22.421959Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:15:22.421977Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715662] at 72075186224037888 has finished 2025-04-17T17:15:22.422017Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:15:22.422053Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:15:22.422079Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:15:22.422107Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:15:22.422169Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:15:22.422226Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-04-17T17:15:22.422269Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715662 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-17T17:15:22.422334Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:15:22.422491Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037888 2025-04-17T17:15:22.422572Z node 2 :TX_PROXY INFO: [ReadTable [2:864:2693] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.013069s execute time: 0.106334s total time: 0.119403s 2025-04-17T17:15:22.422861Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:864:2693], Recipient [2:666:2570]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] Test command err: 2025-04-17T17:15:15.130558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:15:15.131089Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:15:15.131142Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002231/r3tmp/tmpMZQn83/pdisk_1.dat 2025-04-17T17:15:15.511242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:15:15.573747Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:15:15.612413Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:15:15.613340Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:15:15.613536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:15:15.613663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:15:15.625146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:15:15.706799Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T17:15:15.706864Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T17:15:15.707024Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T17:15:15.846421Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T17:15:15.846505Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:15:15.846999Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:15:15.847099Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:15:15.847370Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:15:15.847552Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:15:15.847636Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T17:15:15.849199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:15:15.849567Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T17:15:15.850160Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T17:15:15.850226Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T17:15:15.906978Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:15:15.908019Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:15:15.908455Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:15:15.908732Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:15:15.977949Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:15:15.978764Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:15:15.978872Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:15:15.980574Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:15:15.980643Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:15:15.980696Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:15:15.981056Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:15:15.981211Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:15:15.981289Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:15:15.992567Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:15:16.055440Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:15:16.055625Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:15:16.055758Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:15:16.055790Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:15:16.055826Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:15:16.055859Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:15:16.056066Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-17T17:15:16.057046Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:16.057386Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:15:16.057496Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:15:16.057607Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:15:16.057645Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:15:16.057681Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:15:16.057733Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:15:16.057765Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:15:16.057793Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:15:16.057845Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:15:16.057960Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:15:16.058005Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:15:16.058046Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:15:16.058412Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:15:16.058462Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:15:16.058550Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:15:16.058726Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:15:16.058796Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:15:16.058875Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:15:16.058916Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:15:16.058948Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:15:16.058978Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T17:15:16.059003Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:15:16.059288Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:15:16.059335Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:15:16.059368Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:15:16.059407Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-17T17:15:16.059454Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:15:16.059477Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:15:16.059506Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:15:16.059534Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:15:16.059560Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:15:16.061273Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:15:16.061333Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:15:16.072397Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 90 has no attached operations 2025-04-17T17:15:22.449605Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-17T17:15:22.449651Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T17:15:22.449948Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1001:2799], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-04-17T17:15:22.449970Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-17T17:15:22.449991Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-04-17T17:15:22.450047Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-04-17T17:15:22.450254Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1001:2799], Recipient [2:904:2724]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-17T17:15:22.450285Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-17T17:15:22.450416Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T17:15:22.450532Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1001:2799], Recipient [2:864:2693]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\003\000\000\000b\005\035!\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\003\000\000\000" 2025-04-17T17:15:22.450567Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2025-04-17T17:15:22.450594Z node 2 :TX_PROXY TRACE: [ReadTable [2:864:2693] TxId# 281474976715661] Sending TEvStreamDataAck to [2:1001:2799] ShardId# 72075186224037890 
2025-04-17T17:15:22.450681Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2025-04-17T17:15:22.450744Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1001:2799], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-04-17T17:15:22.450772Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-17T17:15:22.451070Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:863:2693], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-17T17:15:22.451097Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-17T17:15:22.451115Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-04-17T17:15:22.451144Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-04-17T17:15:22.451206Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T17:15:22.451289Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1001:2799], Recipient [2:864:2693]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2025-04-17T17:15:22.451312Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2025-04-17T17:15:22.451333Z node 2 :TX_PROXY TRACE: [ReadTable [2:864:2693] TxId# 281474976715661] Sending TEvStreamDataAck to [2:1001:2799] ShardId# 72075186224037890 2025-04-17T17:15:22.451373Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2025-04-17T17:15:22.451415Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1001:2799], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-04-17T17:15:22.451430Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-17T17:15:22.451595Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:863:2693], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-17T17:15:22.451621Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-17T17:15:22.451655Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-04-17T17:15:22.451690Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 
2025-04-17T17:15:22.451725Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-04-17T17:15:22.451830Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:1001:2799], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715662 ShardId: 72075186224037890 2025-04-17T17:15:22.451855Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037890 2025-04-17T17:15:22.452260Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037890 2025-04-17T17:15:22.452338Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-17T17:15:22.452366Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715662, at: 72075186224037890 2025-04-17T17:15:22.452510Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:904:2724], Recipient [2:904:2724]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:22.452563Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:22.452609Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-17T17:15:22.452630Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:15:22.452655Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037890 for ReadTableScan 2025-04-17T17:15:22.452674Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit ReadTableScan 2025-04-17T17:15:22.452695Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715662] at 72075186224037890 error: , IsFatalError: 0 2025-04-17T17:15:22.452721Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2025-04-17T17:15:22.452741Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit ReadTableScan 2025-04-17T17:15:22.452758Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037890 to execution unit FinishPropose 2025-04-17T17:15:22.452776Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit FinishPropose 2025-04-17T17:15:22.452799Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is DelayComplete 2025-04-17T17:15:22.452815Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit FinishPropose 2025-04-17T17:15:22.452833Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037890 to execution unit CompletedOperations 2025-04-17T17:15:22.452850Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit CompletedOperations 2025-04-17T17:15:22.452887Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2025-04-17T17:15:22.452902Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit CompletedOperations 2025-04-17T17:15:22.452915Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715662] at 72075186224037890 has finished 2025-04-17T17:15:22.452933Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry 
run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:15:22.452948Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-17T17:15:22.452966Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-17T17:15:22.452984Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-17T17:15:22.453018Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T17:15:22.453038Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715662] at 72075186224037890 on unit FinishPropose 2025-04-17T17:15:22.453060Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715662 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-17T17:15:22.453101Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T17:15:22.453247Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:904:2724], Recipient [2:864:2693]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715662 Step: 0 OrderId: 281474976715662 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 303 } } 2025-04-17T17:15:22.453270Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037890 2025-04-17T17:15:22.453920Z node 2 :TX_PROXY INFO: [ReadTable [2:864:2693] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.014363s execute time: 0.356678s total time: 0.371041s 2025-04-17T17:15:22.454250Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:864:2693], Recipient [2:666:2570]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2025-04-17T17:15:22.454404Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:864:2693], Recipient [2:899:2722]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2025-04-17T17:15:22.454541Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:864:2693], Recipient [2:904:2724]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 >> TopicService::OneConsumer_TheRangesDoNotOverlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] Test command err: 2025-04-17T17:15:15.243774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:15:15.244275Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:15:15.244331Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002257/r3tmp/tmps3UYbx/pdisk_1.dat 2025-04-17T17:15:15.676000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:15:15.725564Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:15:15.765424Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:15:15.766354Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:15:15.766523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:15:15.766634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:15:15.781249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:15:15.877713Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T17:15:15.877777Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T17:15:15.877951Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T17:15:16.031333Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T17:15:16.031415Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:15:16.031887Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:15:16.031980Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:15:16.036486Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:15:16.036706Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:15:16.036804Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T17:15:16.038420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:15:16.038840Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T17:15:16.039409Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T17:15:16.039472Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T17:15:16.085101Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:15:16.086254Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:15:16.086693Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:15:16.086900Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:15:16.211406Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:15:16.212217Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:15:16.212332Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:15:16.213864Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:15:16.213936Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:15:16.213983Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:15:16.224393Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:15:16.224563Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:15:16.224643Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:15:16.236680Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:15:16.336912Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:15:16.337104Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:15:16.337208Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:15:16.337243Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:15:16.337271Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:15:16.337301Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:15:16.337498Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-17T17:15:16.337538Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:16.337829Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:15:16.337914Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:15:16.338013Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:15:16.338045Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:15:16.338077Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:15:16.338132Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:15:16.338165Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:15:16.338203Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:15:16.338243Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:15:16.338357Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:15:16.338386Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:15:16.338423Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:15:16.338767Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:15:16.338823Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:15:16.338923Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:15:16.339100Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:15:16.339158Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:15:16.339238Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:15:16.339281Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:15:16.339313Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:15:16.339340Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T17:15:16.339383Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:15:16.339626Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:15:16.339676Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:15:16.339708Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:15:16.339750Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-17T17:15:16.339797Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:15:16.339824Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:15:16.339859Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:15:16.339886Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:15:16.339911Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:15:16.341303Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:15:16.341354Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:15:16.356929Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... planned 0 immediate 1 planned 0 2025-04-17T17:15:23.306384Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715663] at 72075186224037890 for WaitForStreamClearance 2025-04-17T17:15:23.306398Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit WaitForStreamClearance 2025-04-17T17:15:23.306420Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715663] at 72075186224037890 2025-04-17T17:15:23.306454Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:15:23.306472Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit WaitForStreamClearance 2025-04-17T17:15:23.306487Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037890 to execution unit ReadTableScan 2025-04-17T17:15:23.306502Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit ReadTableScan 2025-04-17T17:15:23.306630Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Continue 2025-04-17T17:15:23.306645Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:15:23.306666Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2025-04-17T17:15:23.306694Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-17T17:15:23.306717Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-17T17:15:23.306750Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T17:15:23.307097Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1004:2803], Recipient [2:888:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-17T17:15:23.307131Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-17T17:15:23.307198Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1004:2803], Recipient [2:974:2775]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2025-04-17T17:15:23.307225Z node 2 :TX_PROXY DEBUG: 
[ReadTable [2:974:2775] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-17T17:15:23.307283Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... observed row limit of 2 rows at [2:1004:2803] 2025-04-17T17:15:23.307370Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2025-04-17T17:15:23.307578Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T17:15:23.307690Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1004:2803], Recipient [2:974:2775]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2025-04-17T17:15:23.307708Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2025-04-17T17:15:23.307723Z node 2 :TX_PROXY TRACE: [ReadTable [2:974:2775] TxId# 281474976715662] Sending TEvStreamDataAck to [2:1004:2803] ShardId# 72075186224037890 2025-04-17T17:15:23.307753Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2025-04-17T17:15:23.307806Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1004:2803], Recipient [2:974:2775]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2025-04-17T17:15:23.307820Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-17T17:15:23.308004Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:973:2775], Recipient [2:974:2775]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715662 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-17T17:15:23.308023Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-17T17:15:23.308037Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... 
observed row limit of 1 rows at [2:1004:2803] 2025-04-17T17:15:23.308066Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2025-04-17T17:15:23.308173Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T17:15:23.308332Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1004:2803], Recipient [2:974:2775]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\005\000\000\000b\005\0357\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\005\000\000\000" 2025-04-17T17:15:23.308357Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2025-04-17T17:15:23.308390Z node 2 :TX_PROXY TRACE: [ReadTable [2:974:2775] TxId# 281474976715662] Sending TEvStreamDataAck to [2:1004:2803] ShardId# 72075186224037890 2025-04-17T17:15:23.308453Z node 2 :TX_PROXY INFO: [ReadTable [2:974:2775] TxId# 281474976715662] RESPONSE Status# ExecComplete prepare time: 0.019753s execute time: 0.182437s total time: 0.202190s 2025-04-17T17:15:23.308757Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2025-04-17T17:15:23.308808Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 0 2025-04-17T17:15:23.309027Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:974:2775], Recipient [2:886:2709]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 2025-04-17T17:15:23.309210Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-17T17:15:23.309234Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715663, at: 72075186224037890 2025-04-17T17:15:23.309381Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:888:2711], Recipient [2:888:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:23.309406Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:15:23.309440Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-17T17:15:23.309464Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:15:23.309489Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715663] at 72075186224037890 for ReadTableScan 2025-04-17T17:15:23.309512Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit ReadTableScan 2025-04-17T17:15:23.309543Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715663] at 72075186224037890 error: , IsFatalError: 0 2025-04-17T17:15:23.309572Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:15:23.309595Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit ReadTableScan 2025-04-17T17:15:23.309628Z node 2 :TX_DATASHARD TRACE: Add 
[0:281474976715663] at 72075186224037890 to execution unit FinishPropose 2025-04-17T17:15:23.309651Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit FinishPropose 2025-04-17T17:15:23.309677Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is DelayComplete 2025-04-17T17:15:23.309697Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit FinishPropose 2025-04-17T17:15:23.309717Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037890 to execution unit CompletedOperations 2025-04-17T17:15:23.309738Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit CompletedOperations 2025-04-17T17:15:23.309768Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:15:23.309786Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit CompletedOperations 2025-04-17T17:15:23.309816Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715663] at 72075186224037890 has finished 2025-04-17T17:15:23.309837Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:15:23.309855Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-17T17:15:23.309879Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-17T17:15:23.309898Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-17T17:15:23.309945Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T17:15:23.309974Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715663] at 72075186224037890 on unit FinishPropose 2025-04-17T17:15:23.310001Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715663 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-17T17:15:23.310056Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T17:15:23.310251Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549569, Sender [2:974:2775], Recipient [2:888:2711]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715663 2025-04-17T17:15:23.310278Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2025-04-17T17:15:23.310311Z node 2 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037890 txId 281474976715663 2025-04-17T17:15:23.310353Z node 2 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037890 txId 281474976715663 2025-04-17T17:15:23.310465Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287431, Sender [2:974:2775], Recipient [2:888:2711]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715663 2025-04-17T17:15:23.310492Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2025-04-17T17:15:23.310562Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:974:2775], Recipient [2:888:2711]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 >> Cdc::Alter [GOOD] >> Cdc::AddColumn >> 
TConsoleTests::TestAttributesExtSubdomain [GOOD] >> TConsoleTests::TestDatabaseQuotas >> SystemView::PartitionStatsTtlFields [GOOD] >> SystemView::PartitionStatsLocksFields |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TestKinesisHttpProxy::TestRequestNoAuthorization [GOOD] |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> SystemView::ShowCreateTableKeyBloomFilter [GOOD] >> SystemView::ShowCreateTable |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TopicService::OneConsumer_TheRangesOverlap >> TPersQueueTest::SchemeshardRestart [GOOD] >> TPersQueueTest::SameOffset |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TestKinesisHttpProxy::TestUnauthorizedPutRecords >> DemoTx::Scenario_1 [GOOD] |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |77.0%| [TA] $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} |77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |77.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} |77.0%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TPersQueueTest::BadTopic [GOOD] >> TPersQueueTest::CloseActiveWriteSessionOnClusterDisable >> DemoTx::Scenario_2 >> TPersQueueTest::WriteExisting [GOOD] >> TPersQueueTest::WriteExistingBigValue >> TPersQueueTest::DirectReadPreCached [GOOD] >> TPersQueueTest::DirectReadNotCached |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |77.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/metadata/initializer/ut/unittest |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TPersQueueTest::UpdatePartitionLocation [GOOD] >> TPersQueueTest::TopicServiceCommitOffset >> KqpStats::SysViewCancelled [GOOD] >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic [GOOD] |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TConsoleTests::TestDatabaseQuotas [GOOD] >> TConsoleTests::TestDatabaseQuotasBadOverallQuota >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] >> SystemView::ShowCreateTableTtlSettings [GOOD] >> SystemView::ShowCreateTableTemporary >> TPersQueueTest::SetupLockSession2 [GOOD] >> TPersQueueTest::SetupLockSession |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |77.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |77.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewCancelled [GOOD] Test command err: Trying to start YDB, gRPC: 27784, MsgBus: 6256 2025-04-17T17:14:23.176459Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331655310258192:2272];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:23.179260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002451/r3tmp/tmpWpOh6H/pdisk_1.dat 2025-04-17T17:14:23.606906Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27784, node 1 2025-04-17T17:14:23.640766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:23.640867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:23.644433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:14:23.718776Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:23.718798Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:23.718806Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:23.718941Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6256 TClient is connected to server localhost:6256 WaitRootIsUp 'Root'... 
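Note on the TX_DATASHARD TRACE lines earlier in this section: an operation is advanced unit by unit (FinishPropose, then CompletedOperations, then a PlanQueue check), and each unit reports a status such as Executed or DelayComplete, where DelayComplete defers the completion work (the client reply) until the local transaction commits. A simplified, self-contained C++ sketch of that status-driven pipeline follows; it is a mental model only, not the actual NKikimr execution-unit code.

#include <iostream>
#include <string>
#include <vector>

// Simplified model of a datashard execution-unit pipeline.
// Unit names mirror those in the trace (FinishPropose, CompletedOperations);
// the real YDB classes and status set are richer than this sketch.
enum class EStatus { Executed, DelayComplete };

struct TUnit {
    std::string Name;
    EStatus (*Execute)();  // captureless lambdas convert to this
};

int main() {
    std::vector<TUnit> plan = {
        {"FinishPropose",       [] { return EStatus::DelayComplete; }},
        {"CompletedOperations", [] { return EStatus::Executed; }},
    };
    std::vector<std::string> delayedComplete;  // units re-run at commit time

    for (const TUnit& unit : plan) {
        std::cout << "Trying to execute on unit " << unit.Name << "\n";
        if (unit.Execute() == EStatus::DelayComplete) {
            // Completion work (e.g. replying to the client) is deferred
            // until the local transaction commits.
            delayedComplete.push_back(unit.Name);
        }
        std::cout << "Advance execution plan, executing on unit " << unit.Name << "\n";
    }
    for (const std::string& name : delayedComplete)
        std::cout << "Complete execution on unit " << name << "\n";
}

The deferred list is why "Complete execution ... on unit FinishPropose" appears in the trace only later, inside TTxProgressTransaction::Complete.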
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:24.366587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:24.381027Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:14:24.394640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:24.558782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:24.727544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:24.824428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:26.559858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331668195161633:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:26.559969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:26.834628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:26.879868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:26.916327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:26.945519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:26.983485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:27.018801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:27.092949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:27.189931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331672490129596:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:27.190014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:27.197775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331672490129601:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:27.201257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:27.218007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331672490129603:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:14:27.290212Z node 1 :TX_PROXY ERROR: Actor# [1:7494331672490129658:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:28.183982Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331655310258192:2272];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:28.185812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"E-Size":"No estimate","PlanNodeId":5,"LookupKeyColumns":["Key"],"Node Type":"TableLookup","Path":"\/Root\/TwoShard","Columns":["Key","Value1","Value2"],"E-Rows":"No estimate","Table":"TwoShard","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"FirstMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"Name":"4","Push":{"WaitTimeUs":{"Count":1,"Sum":4358,"Max":4358,"Min":4358},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"FirstMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4}}}],"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"Tasks":1,"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"FinishedTasks":1,"IngressRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":1000,"Table":[{"Path":"\/Root\/KeyValue","ReadRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"ReadBytes":{"Count":1,"Sum":16,"Max":16,"Min":16}}],"BaseTimeMs":1744910068661,"OutputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"CpuTimeUs":{"Count":1,"Sum":808,"Max":808,"Min":808},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"FirstMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Bytes":{"Count":1,"Sum":32,"Max":32,"Min":32}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"FirstMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Bytes":{"Count":1,"Sum":32,"Max":32,"Min":32},"WaitTimeUs":{"Count":1,"Sum":4425,"Max":4425,"Min":4425},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"UpdateTimeMs":
5}}],"Node Type":"HashShuffle","KeyColumns":["Key"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"PartitionByKey","Input":"NarrowMap"}],"Node Type":"Aggregate","Stats":{"UseLlvm":"undefined","OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":1,"FinishedTasks":1,"InputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"BaseTimeMs":1744910068661,"Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Rows":{"Count":1,"Sum":2,"Max":2,"Min" ... ","QueuedTimeUs":725},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Operators":[{"E-Rows":"No estimate","Columns":["Key","Value1","Value2"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"TwoShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"A-Rows":2,"A-SelfCpu":7.482,"A-Cpu":7.482,"A-Size":18,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"A-Rows":2,"A-SelfCpu":0.659,"A-Cpu":8.141,"A-Size":18,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} Trying to start YDB, gRPC: 29573, MsgBus: 14833 2025-04-17T17:14:36.007790Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494331711179757197:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:36.007850Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/002451/r3tmp/tmpNrvied/pdisk_1.dat 2025-04-17T17:14:36.113849Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:14:36.145521Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:14:36.145595Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:14:36.146485Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29573, node 3 2025-04-17T17:14:36.208650Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:14:36.208677Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:14:36.208682Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:14:36.208795Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14833 TClient is connected to server localhost:14833 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:14:36.704281Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:36.710776Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:14:36.727529Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:36.836593Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:37.070431Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:37.152083Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:39.622382Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331724064660841:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:39.622460Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:39.694370Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:14:39.730999Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:14:39.776155Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:14:39.823126Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:14:39.860810Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:14:39.906840Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:14:39.948967Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:14:40.027786Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331728359628796:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:40.027893Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:40.028218Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494331728359628801:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:14:40.033162Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:14:40.050871Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494331728359628803:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:14:40.142712Z node 3 :TX_PROXY ERROR: Actor# [3:7494331728359628862:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:14:41.008569Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494331711179757197:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:14:41.008634Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:14:41.161630Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:14:51.088637Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:14:51.088669Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:15:27.527218Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910127434, txId: 281474976715673] shutting down 2025-04-17T17:15:28.441861Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7494331934518061032:2778] TxId: 281474976715675. Ctx: { TraceId: 01js2bz56h4jhj00rvt8bfsjwh, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YzRkMDUzNjYtZDEwNDMxNDMtMjA1ZmE5YWMtYmFkZmY5MjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 100ms } {
: Error: Cancelling after 104ms during execution } ] 2025-04-17T17:15:28.442634Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494331934518061054:2800], TxId: 281474976715675, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=YzRkMDUzNjYtZDEwNDMxNDMtMjA1ZmE5YWMtYmFkZmY5MjE=. TraceId : 01js2bz56h4jhj00rvt8bfsjwh. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7494331934518061032:2778], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T17:15:28.443155Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494331934518061063:2808], TxId: 281474976715675, task: 9. Ctx: { SessionId : ydb://session/3?node_id=3&id=YzRkMDUzNjYtZDEwNDMxNDMtMjA1ZmE5YWMtYmFkZmY5MjE=. TraceId : 01js2bz56h4jhj00rvt8bfsjwh. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7494331934518061032:2778], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T17:15:28.443793Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YzRkMDUzNjYtZDEwNDMxNDMtMjA1ZmE5YWMtYmFkZmY5MjE=, ActorId: [3:7494331925928126381:2778], ActorState: ExecuteState, TraceId: 01js2bz56h4jhj00rvt8bfsjwh, Create QueryResponse for error on request, msg:
: Error: Request canceled after 100ms
: Error: Cancelling after 104ms during execution 2025-04-17T17:15:28.744789Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744910128736, txId: 281474976715677] shutting down |77.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |77.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |77.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |77.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 >> TestKinesisHttpProxy::TestUnauthorizedPutRecords [GOOD] >> Cdc::RacyActivateAndEnqueue [GOOD] >> Cdc::RacyCreateAndSend >> TPersQueueTest::ReadFromSeveralPartitions [GOOD] >> TPersQueueTest::ReadFromSeveralPartitionsMigrated ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic [GOOD] Test command err: 2025-04-17T17:12:10.293101Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494331086887089266:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:10.294330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:12:14.353033Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6vg8/0017a9/r3tmp/tmpPVfnm1/pdisk_1.dat 2025-04-17T17:12:15.053431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:15.287267Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494331086887089266:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:12:15.287324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:12:16.774549Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:16.900444Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:17.920997Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:17.921024Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:19.098009Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-17T17:12:19.143857Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:19.143881Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:12:19.802184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:12:19.802918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:12:19.969508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64212, node 1 2025-04-17T17:12:20.605759Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6vg8/0017a9/r3tmp/yandexkK4Z4S.tmp 2025-04-17T17:12:20.606149Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6vg8/0017a9/r3tmp/yandexkK4Z4S.tmp 2025-04-17T17:12:20.606939Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6vg8/0017a9/r3tmp/yandexkK4Z4S.tmp 2025-04-17T17:12:20.607027Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:12:21.271823Z INFO: TTestServer started on Port 24434 GrpcPort 64212 TClient is connected to server localhost:24434 PQClient connected to localhost:64212 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:12:28.820358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:12:29.158975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:12:30.179543Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 
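Note on the CANCELLED query above ("Request canceled after 100ms" together with "Cancelling after 104ms during execution"): two independent limits are attached to one request, an operation timeout and a slightly longer cancel-after budget that fires as a backstop while execution is still in flight. A hedged ydb-cpp-sdk sketch of setting both follows; the fluent setters OperationTimeout/CancelAfter on TExecDataQuerySettings, the header path, and the local endpoint are assumptions from memory, not verified against this build.

#include <ydb-cpp-sdk/client/table/table.h>  // header path varies by SDK version

#include <iostream>

int main() {
    // Hypothetical local endpoint/database, for illustration only.
    auto config = NYdb::TDriverConfig()
        .SetEndpoint("grpc://localhost:2136")
        .SetDatabase("/Root");
    NYdb::TDriver driver(config);
    NYdb::NTable::TTableClient client(driver);

    auto sessionResult = client.CreateSession().GetValueSync();
    if (!sessionResult.IsSuccess()) {
        std::cerr << "no session\n";
        return 1;
    }
    auto session = sessionResult.GetSession();

    // Two limits, as in the log: the operation is aborted after 100 ms,
    // and a cancel is issued slightly later as a backstop.
    auto settings = NYdb::NTable::TExecDataQuerySettings()
        .OperationTimeout(TDuration::MilliSeconds(100))
        .CancelAfter(TDuration::MilliSeconds(104));

    auto result = session.ExecuteDataQuery(
        "SELECT COUNT(*) FROM `/Root/KeyValue`;",
        NYdb::NTable::TTxControl::BeginTx().CommitTx(),
        settings).GetValueSync();

    if (result.GetStatus() == NYdb::EStatus::CANCELLED)
        std::cout << "query canceled by its deadline, as in the log\n";

    driver.Stop(true);
}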
2025-04-17T17:12:34.084900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:12:34.084921Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:12:38.287341Z node 1 :KQP_PROXY ERROR: TraceId: "01js2bsqen270ps51vk91jm30b", Request deadline has expired for 3.021954s seconds 2025-04-17T17:12:38.937945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331207146174462:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:38.968750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:39.020695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494331207146174478:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:12:39.203951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:12:39.479740Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-04-17T17:12:39.480148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494331211441141776:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:12:39.704279Z node 1 :TX_PROXY ERROR: Actor# [1:7494331211441141842:2506] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:12:40.818030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:12:40.984828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:12:41.370601Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494331211441141852:2379], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:12:41.372116Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzNjODg0YTAtZDE3ZWI2MmEtOWQ4NTRmMTYtZTY1ZDY0YWU=, ActorId: [1:7494331207146174460:2364], ActorState: ExecuteState, TraceId: 01js2bsz4d7s5t1hesqsz8nmbx, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:12:41.375455Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:12:42.287692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:12:42.808733Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494331224326043978:2411], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:12:42.810025Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjRkODNkNzMtZmZjZmIzZjItM2RiZjU5NTctOTU2MDUzODY=, ActorId: [1:7494331224326043976:2410], ActorState: ExecuteState, TraceId: 01js2bt3bwfg4a0ssyhbjyw50b, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:12:42.829669Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7494331241505913302:2734] === CheckClustersList. Ok 2025-04-17T17:12:52.422384Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-17T17:12:52.514167Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-17T17:12:52.516442Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:74943312672757173 ... ind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-04-17T17:15:29.606411Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [6:7494331909754738624:2863], Partition 2, Sender [6:7494331909754738624:2863], Recipient [6:7494331909754738705:2870], Cookie: 0 2025-04-17T17:15:29.606445Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender [6:7494331909754738624:2863], Recipient [6:7494331909754738705:2870]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2025-04-17T17:15:29.606458Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2025-04-17T17:15:29.606512Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [6:7494331909754738707:2872], Recipient [6:7494331909754738625:2864]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-04-17T17:15:29.606536Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-04-17T17:15:29.606669Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [6:7494331909754738705:2870], Recipient [6:7494331909754738624:2863]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-04-17T17:15:29.606683Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-04-17T17:15:29.607130Z node 6 
:PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 15 DataSize: 0 UsedReserveSize: 0 2025-04-17T17:15:29.607273Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] ProcessPendingStats. PendingUpdates size 3 2025-04-17T17:15:29.607520Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [6:7494331875394998636:2460], Recipient [6:7494331823855389940:2146]: NKikimrPQ.TEvPeriodicTopicStats PathId: 13 Generation: 1 Round: 15 DataSize: 0 UsedReserveSize: 0 SubDomainOutOfSpace: false 2025-04-17T17:15:29.607553Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-04-17T17:15:29.607574Z node 6 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046644480, LocalPathId: 13] DataSize 0 UsedReserveSize 0 2025-04-17T17:15:29.607602Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.099995s, queue# 1 2025-04-17T17:15:29.642669Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [6:7494331875394998636:2460], Recipient [6:7494331823855389940:2146]: NKikimrSchemeOp.TDescribePath PathId: 13 SchemeshardId: 72057594046644480 2025-04-17T17:15:29.642730Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-17T17:15:29.658533Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331909754738625:2864], Partition 1, Sender [0:0:0], Recipient [6:7494331909754738707:2872], Cookie: 0 2025-04-17T17:15:29.658623Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331909754738707:2872]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:29.658655Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:29.658705Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:29.658776Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:29.658805Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:29.658839Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:29.658891Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331875394998641:2461], Partition 0, Sender [0:0:0], Recipient [6:7494331875394998698:2464], Cookie: 0 2025-04-17T17:15:29.658931Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331875394998698:2464]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:29.658944Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:29.658970Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:29.659004Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:29.659021Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:29.659042Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:29.659087Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331909754738624:2863], Partition 2, Sender [0:0:0], Recipient [6:7494331909754738705:2870], Cookie: 0 2025-04-17T17:15:29.659134Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331909754738705:2870]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:29.659147Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:29.659170Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:29.659201Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:29.659218Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:29.659237Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:29.708614Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435095, Sender [0:0:0], Recipient [6:7494331823855389940:2146]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-04-17T17:15:29.708687Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-04-17T17:15:29.708706Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-17T17:15:29.708722Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-17T17:15:29.708825Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2025-04-17T17:15:29.709341Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435095, Sender [0:0:0], Recipient [6:7494331823855389940:2146]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-04-17T17:15:29.709371Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-04-17T17:15:29.709393Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-17T17:15:29.759868Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331909754738625:2864], Partition 1, Sender [0:0:0], Recipient [6:7494331909754738707:2872], Cookie: 0 2025-04-17T17:15:29.759969Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331909754738707:2872]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:29.760011Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:29.760071Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:29.760166Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:29.760199Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:29.760238Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:29.760318Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331875394998641:2461], Partition 0, Sender [0:0:0], Recipient [6:7494331875394998698:2464], Cookie: 0 2025-04-17T17:15:29.760357Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331875394998698:2464]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:29.760373Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:29.760399Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:29.760437Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:29.760457Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:29.760478Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:15:29.760528Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494331909754738624:2863], Partition 2, Sender [0:0:0], Recipient [6:7494331909754738705:2870], Cookie: 0 2025-04-17T17:15:29.760566Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494331909754738705:2870]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:29.760581Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:15:29.760606Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:15:29.760637Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:15:29.760657Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:15:29.760679Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-17T17:15:29.811530Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [6:7494331875394998641:2461], Partition 0, Sender [6:7494331875394998701:2466], Recipient [6:7494331875394998698:2464], Cookie: 0 2025-04-17T17:15:29.811660Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [6:7494331875394998701:2466], Recipient [6:7494331875394998698:2464]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2025-04-17T17:15:29.811694Z node 6 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 >> Cdc::AddColumn [GOOD] >> Cdc::AddColumn_TopicAutoPartitioning >> TConsoleTests::TestDatabaseQuotasBadOverallQuota [GOOD] >> TConsoleTests::TestDatabaseQuotasBadStorageQuota >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 >> TestKinesisHttpProxy::TestWrongStream >> TCdcStreamTests::MeteringServerless [GOOD] >> TCdcStreamTests::MeteringDedicated ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] Test command err: 2025-04-17T17:10:20.860128Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:10:20.872625Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:10:20.873012Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T17:10:20.873198Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:10:20.955753Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:10:20.985789Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:10:21.429692Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:10:21.459133Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T17:10:21.459757Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T17:10:21.460159Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T17:10:21.462645Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:10:21.464015Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:10:21.465745Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T17:10:21.467248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:10:21.467500Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:10:21.767914Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:10:22.039593Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T17:10:22.044945Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:10:22.045374Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, 
actorId: [1:215:2213]
2025-04-17T17:10:22.045736Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184
2025-04-17T17:10:22.045921Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme
2025-04-17T17:10:22.046141Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-17T17:10:22.047483Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-17T17:10:22.047679Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-17T17:10:22.052908Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184
2025-04-17T17:10:22.053683Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184
2025-04-17T17:10:22.054166Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184
2025-04-17T17:10:22.054381Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0
2025-04-17T17:10:22.054776Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184
2025-04-17T17:10:22.055147Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations
2025-04-17T17:10:22.055340Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184
2025-04-17T17:10:22.055891Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0
2025-04-17T17:10:22.060238Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-04-17T17:10:22.061240Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-17T17:10:22.061455Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-17T17:10:22.061864Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0]
2025-04-17T17:10:22.086440Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { }
2025-04-17T17:10:22.086524Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-04-17T17:10:22.087305Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-04-17T17:10:22.096433Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx
2025-04-17T17:10:22.096662Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0
2025-04-17T17:10:22.096869Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184
2025-04-17T17:10:22.096916Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts
2025-04-17T17:10:22.097108Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx
2025-04-17T17:10:22.097448Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx
2025-04-17T17:10:22.097637Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx
2025-04-17T17:10:22.099670Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts
2025-04-17T17:10:22.099834Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx
2025-04-17T17:10:22.102773Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose
2025-04-17T17:10:22.102991Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose
2025-04-17T17:10:22.103254Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete
2025-04-17T17:10:22.103479Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose
2025-04-17T17:10:22.103876Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan
2025-04-17T17:10:22.104080Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan
2025-04-17T17:10:22.104102Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan
2025-04-17T17:10:22.129202Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184
2025-04-17T17:10:22.129608Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx
2025-04-17T17:10:22.129816Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose
2025-04-17T17:10:22.129850Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED
2025-04-17T17:10:22.130246Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme
2025-04-17T17:10:22.134285Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-17T17:10:22.134815Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-17T17:10:22.135022Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0]
2025-04-17T17:10:22.135839Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184}
2025-04-17T17:10:22.135864Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep
2025-04-17T17:10:22.143086Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan
2025-04-17T17:10:22.143348Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed
2025-04-17T17:10:22.143931Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan
2025-04-17T17:10:22.143970Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue
2025-04-17T17:10:22.208212Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 }
2025-04-17T17:10:22.208662Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-17T17:10:22.210117Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-17T17:10:22.210156Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-17T17:10:22.210558Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184
2025-04-17T17:10:22.210595Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1
2025-04-17T17:10:22.210628Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184
2025-04-17T17:10:22.210662Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184
2025-04-17T17:10:22.210693Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue
2025-04-17T17:10:22.211078Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed
2025-04-17T17:10:22.211435Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue
2025-04-17T17:10:22.211946Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails
2025-04-17T17:10:22.216414Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails
2025-04-17T17:10:22.217474Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0
2025-04-17T17:10:22.217696Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed
2025-04-17T17:10:22.217725Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails
2025-04-17T17:10:22.217753Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes
2025-04-17T17:10:22.217778Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes
2025-04-17T17:10:22.218015Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts
2025-04-17T17:10:22.218211Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes
2025-04-17T17:10:22.218606Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies
2025-04-17T17:10:22.218808Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies
2025-04-17T17:10:22.218850Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184
2025-04-17T17:10:22.219514Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184
2025-04-17T17:10:22.219751Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184
2025-04-17T17:10:22.224278Z node 1 :TX_DATA ...
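The trace above follows operation [0:1] through the datashard's chain of execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, PlanQueue, LoadTxDetails, ProtectSchemeEchoes, BuildAndWaitDependencies): each attempt logs "Trying to execute", a resulting status such as Executed, ExecutedNoMoreRestarts, DelayComplete, NotReady, or Restart, and "Advance execution plan" once the unit is done, with DelayComplete units revisited later in the transaction's Complete phase. Below is a minimal C++ sketch of that dispatch loop; TExecutionUnit, EStatus, and AdvancePlan are illustrative names assumed for exposition, not the actual NKikimr::NDataShard types.

// Illustrative sketch only: these names are assumptions for exposition,
// not the real NKikimr::NDataShard interfaces.
#include <cstdio>
#include <deque>
#include <memory>

enum class EStatus { Executed, DelayComplete, NotReady, Restart };

struct TExecutionUnit {
    explicit TExecutionUnit(const char* name) : Name(name) {}
    virtual ~TExecutionUnit() = default;
    virtual EStatus Execute() = 0; // one "Trying to execute ... on unit <Name>" attempt
    virtual void Complete() {}     // deferred work for DelayComplete units
    const char* Name;
};

// Drives an operation through its unit chain, mirroring the
// "Trying to execute / Execution status / Advance execution plan" lines.
void AdvancePlan(std::deque<std::unique_ptr<TExecutionUnit>>& plan,
                 std::deque<std::unique_ptr<TExecutionUnit>>& delayed) {
    while (!plan.empty()) {
        const char* name = plan.front()->Name; // points to a string literal
        std::printf("Trying to execute on unit %s\n", name);
        switch (plan.front()->Execute()) {
        case EStatus::NotReady: // e.g. WaitForPlan before the plan step arrives
        case EStatus::Restart:  // e.g. ExecuteDataTx waiting for pages or memory
            return;             // the loop is re-entered by a later transaction
        case EStatus::DelayComplete:
            // "Complete execution for ..." runs later, in the tx Complete phase.
            delayed.push_back(std::move(plan.front()));
            plan.pop_front();
            break;
        case EStatus::Executed:
            plan.pop_front();
            break;
        }
        std::printf("Advance execution plan past unit %s\n", name);
    }
    std::printf("Execution plan has finished\n");
}

struct TFinishPropose : TExecutionUnit {
    TFinishPropose() : TExecutionUnit("FinishPropose") {}
    EStatus Execute() override { return EStatus::DelayComplete; }
    void Complete() override { std::printf("Complete execution on unit %s\n", Name); }
};

int main() {
    std::deque<std::unique_ptr<TExecutionUnit>> plan, delayed;
    plan.push_back(std::make_unique<TFinishPropose>());
    AdvancePlan(plan, delayed);
    for (auto& u : delayed) u->Complete(); // the TTx Complete phase, as in the trace
    return 0;
}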
c latency: 58 ms, propose latency: 58 ms, status: COMPLETE
2025-04-17T17:15:19.502710Z node 3 :TX_DATASHARD TRACE: Execution status for [0:10] at 9437184 is DelayComplete
2025-04-17T17:15:19.502736Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 9437184 executing on unit FinishPropose
2025-04-17T17:15:19.502759Z node 3 :TX_DATASHARD TRACE: Add [0:10] at 9437184 to execution unit CompletedOperations
2025-04-17T17:15:19.502780Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:10] at 9437184 on unit CompletedOperations
2025-04-17T17:15:19.502819Z node 3 :TX_DATASHARD TRACE: Execution status for [0:10] at 9437184 is Executed
2025-04-17T17:15:19.502841Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 9437184 executing on unit CompletedOperations
2025-04-17T17:15:19.502864Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:10] at 9437184 has finished
2025-04-17T17:15:19.527690Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184
2025-04-17T17:15:19.527759Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:10] at 9437184 on unit FinishPropose
2025-04-17T17:15:19.527813Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-17T17:15:23.147524Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 }
2025-04-17T17:15:23.147596Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState
2025-04-17T17:15:23.147993Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:647:2622], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-17T17:15:23.148032Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-17T17:15:23.148070Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:646:2621], serverId# [3:647:2622], sessionId# [0:0:0]
2025-04-17T17:15:23.148309Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\354\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4\000\'?8\003\013?>\003?<\003j\030\001\003?@\000\003?B\000\003?D\007\240%&\003?F\000\006\004?J\003\203\014\000\003\203\014\000\003\003?L\000\377\007\002\000\005?\032\005?\026?x\000\005?\030\003\005? \005?\034?x\000\006\
2025-04-17T17:15:23.148345Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-04-17T17:15:23.148430Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-04-17T17:15:23.149768Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit CheckDataTx
2025-04-17T17:15:23.168083Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed
2025-04-17T17:15:23.168291Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit CheckDataTx
2025-04-17T17:15:23.168334Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit BuildAndWaitDependencies
2025-04-17T17:15:23.168374Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit BuildAndWaitDependencies
2025-04-17T17:15:23.168438Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615
2025-04-17T17:15:23.168512Z node 3 :TX_DATASHARD TRACE: Activated operation [0:11] at 9437184
2025-04-17T17:15:23.168552Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed
2025-04-17T17:15:23.168581Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit BuildAndWaitDependencies
2025-04-17T17:15:23.168611Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit ExecuteDataTx
2025-04-17T17:15:23.168639Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx
2025-04-17T17:15:23.173994Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution
2025-04-17T17:15:23.174212Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data
2025-04-17T17:15:23.174258Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart
2025-04-17T17:15:23.262433Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-04-17T17:15:23.262511Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx
2025-04-17T17:15:23.263160Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data
2025-04-17T17:15:23.268132Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution
2025-04-17T17:15:23.268309Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data
2025-04-17T17:15:23.268363Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart
2025-04-17T17:15:23.427625Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-04-17T17:15:23.427707Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx
2025-04-17T17:15:23.428423Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data
2025-04-17T17:15:23.463523Z node 3 :TX_DATASHARD TRACE: Operation [0:11] at 9437184 exceeded memory limit 4194304 and requests 33554432 more for the next try
2025-04-17T17:15:23.463791Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data
2025-04-17T17:15:23.463833Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart
2025-04-17T17:15:23.464265Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-04-17T17:15:23.464306Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx
2025-04-17T17:15:23.464872Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data
2025-04-17T17:15:23.630328Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution
2025-04-17T17:15:23.631066Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data
2025-04-17T17:15:23.631125Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart
2025-04-17T17:15:23.834049Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-04-17T17:15:23.834117Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx
2025-04-17T17:15:23.834740Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data
2025-04-17T17:15:24.126681Z node 3 :TX_DATASHARD TRACE: Operation [0:11] at 9437184 exceeded memory limit 37748736 and requests 301989888 more for the next try
2025-04-17T17:15:24.127561Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data
2025-04-17T17:15:24.127614Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart
2025-04-17T17:15:24.309329Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-04-17T17:15:24.309421Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx
2025-04-17T17:15:24.310202Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data
2025-04-17T17:15:24.314888Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution
2025-04-17T17:15:24.315095Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data
2025-04-17T17:15:24.315147Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart
2025-04-17T17:15:24.350154Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-04-17T17:15:24.350219Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx
2025-04-17T17:15:24.350861Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data
2025-04-17T17:15:24.352456Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution
2025-04-17T17:15:24.352594Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data
2025-04-17T17:15:24.352634Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart
2025-04-17T17:15:24.369692Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-04-17T17:15:24.369763Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx
2025-04-17T17:15:24.370467Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data
2025-04-17T17:15:24.375242Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution
2025-04-17T17:15:24.375421Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data
2025-04-17T17:15:24.375466Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart
2025-04-17T17:15:24.890756Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-04-17T17:15:24.890826Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx
2025-04-17T17:15:24.891424Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data
2025-04-17T17:15:26.088946Z node 3 :TX_DATASHARD TRACE: Executed operation [0:11] at tablet 9437184 with status COMPLETE
2025-04-17T17:15:26.089068Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:11] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 129871, SelectRangeBytes: 40000268, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0}
2025-04-17T17:15:26.089144Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed
2025-04-17T17:15:26.089185Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit ExecuteDataTx
2025-04-17T17:15:26.089225Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit FinishPropose
2025-04-17T17:15:26.089262Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit FinishPropose
2025-04-17T17:15:26.089315Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 11 at tablet 9437184 send to client, exec latency: 62 ms, propose latency: 62 ms, status: COMPLETE
2025-04-17T17:15:26.089469Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is DelayComplete
2025-04-17T17:15:26.089500Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit FinishPropose
2025-04-17T17:15:26.089529Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit CompletedOperations
2025-04-17T17:15:26.089558Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit CompletedOperations
2025-04-17T17:15:26.089607Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed
2025-04-17T17:15:26.089631Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit CompletedOperations
2025-04-17T17:15:26.089657Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:11] at 9437184 has finished
2025-04-17T17:15:26.122271Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184
2025-04-17T17:15:26.122353Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:11] at 9437184 on unit FinishPropose
2025-04-17T17:15:26.122421Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1
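The ExecuteDataTx attempts above show the shard's grow-and-retry behavior under a memory quota: the operation starts under a 4194304-byte (4 MiB) limit, and whenever the limit is exceeded it releases its data, requests a larger grant, and returns Restart. In this run the requested increment is eight times the current limit (4194304 requests 33554432 more, then 37748736 requests 301989888 more) until the roughly 40 MB SelectRange result (SelectRangeBytes: 40000268) fits. A self-contained sketch of that pattern follows; the names (TryExecute, NeededBytes) are invented for illustration and the 8x factor is taken only from the two retries observed in this trace, not from any documented policy.

// Hypothetical sketch of the grow-and-retry pattern visible in the trace above.
#include <cstdint>
#include <cstdio>

// One-attempt executor: succeeds once the grant covers the ~40 MB
// SelectRange result seen in the trace (SelectRangeBytes: 40000268).
static const uint64_t NeededBytes = 40000268;

bool TryExecute(uint64_t limitBytes) {
    return limitBytes >= NeededBytes; // otherwise: "exceeded memory limit"
}

int main() {
    uint64_t limit = 4194304; // initial 4 MiB grant, as in the trace
    while (!TryExecute(limit)) {
        // The trace shows the shard asking for 8x the current limit as an
        // additional grant: 4194304 -> +33554432, 37748736 -> +301989888.
        uint64_t more = 8 * limit;
        std::printf("exceeded memory limit %llu and requests %llu more for the next try\n",
                    (unsigned long long)limit, (unsigned long long)more);
        limit += more; // tx releases its data and restarts under the new limit
    }
    std::printf("executed with limit %llu\n", (unsigned long long)limit);
    return 0;
}

Run as written, this reproduces exactly the two "exceeded memory limit" retries logged above before the operation completes with status COMPLETE.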